a large semi-rewrite of fs logic

fyears 2024-04-27 02:27:24 +08:00
parent 5ce350ba41
commit f33fa26c03
28 changed files with 3507 additions and 3763 deletions

View File

@ -36,6 +36,7 @@ esbuild
"net",
"http",
"https",
"vm",
// ...builtins
],
inject: ["./esbuild.injecthelper.mjs"],

View File

@ -16,7 +16,9 @@
"process": "process/browser",
"stream": "stream-browserify",
"crypto": "crypto-browserify",
"url": "url/"
"url": "url/",
"fs": false,
"vm": false
},
"source": "main.ts",
"keywords": [],
@ -59,7 +61,7 @@
"@aws-sdk/lib-storage": "^3.474.0",
"@aws-sdk/signature-v4-crt": "^3.474.0",
"@aws-sdk/types": "^3.468.0",
"@azure/msal-node": "^2.6.0",
"@azure/msal-node": "^2.7.0",
"@fyears/rclone-crypt": "^0.0.7",
"@fyears/tsqueue": "^1.0.1",
"@microsoft/microsoft-graph-client": "^3.0.7",

View File

@ -1,215 +0,0 @@
import { CipherMethodType } from "./baseTypes";
import * as openssl from "./encryptOpenSSL";
import * as rclone from "./encryptRClone";
import { isVaildText } from "./misc";
export class Cipher {
readonly password: string;
readonly method: CipherMethodType;
cipherRClone?: rclone.CipherRclone;
constructor(password: string, method: CipherMethodType) {
this.password = password ?? "";
this.method = method;
if (method === "rclone-base64") {
this.cipherRClone = new rclone.CipherRclone(password, 5);
}
}
closeResources() {
if (this.method === "rclone-base64" && this.cipherRClone !== undefined) {
this.cipherRClone.closeResources();
}
}
isPasswordEmpty() {
return this.password === "";
}
isFolderAware() {
if (this.method === "openssl-base64") {
return false;
}
if (this.method === "rclone-base64") {
return true;
}
throw Error(`no idea about isFolderAware for method=${this.method}`);
}
async encryptContent(content: ArrayBuffer) {
// console.debug("start encryptContent");
if (this.password === "") {
return content;
}
if (this.method === "openssl-base64") {
const res = await openssl.encryptArrayBuffer(content, this.password);
if (res === undefined) {
throw Error(`cannot encrypt content`);
}
return res;
} else if (this.method === "rclone-base64") {
const res =
await this.cipherRClone!.encryptContentByCallingWorker(content);
if (res === undefined) {
throw Error(`cannot encrypt content`);
}
return res;
} else {
throw Error(`not supported encrypt method=${this.method}`);
}
}
async decryptContent(content: ArrayBuffer) {
// console.debug("start decryptContent");
if (this.password === "") {
return content;
}
if (this.method === "openssl-base64") {
const res = await openssl.decryptArrayBuffer(content, this.password);
if (res === undefined) {
throw Error(`cannot decrypt content`);
}
return res;
} else if (this.method === "rclone-base64") {
const res =
await this.cipherRClone!.decryptContentByCallingWorker(content);
if (res === undefined) {
throw Error(`cannot decrypt content`);
}
return res;
} else {
throw Error(`not supported decrypt method=${this.method}`);
}
}
async encryptName(name: string) {
// console.debug("start encryptName");
if (this.password === "") {
return name;
}
if (this.method === "openssl-base64") {
const res = await openssl.encryptStringToBase64url(name, this.password);
if (res === undefined) {
throw Error(`cannot encrypt name=${name}`);
}
return res;
} else if (this.method === "rclone-base64") {
const res = await this.cipherRClone!.encryptNameByCallingWorker(name);
if (res === undefined) {
throw Error(`cannot encrypt name=${name}`);
}
return res;
} else {
throw Error(`not supported encrypt method=${this.method}`);
}
}
async decryptName(name: string): Promise<string> {
// console.debug("start decryptName");
if (this.password === "") {
return name;
}
if (this.method === "openssl-base64") {
if (name.startsWith(openssl.MAGIC_ENCRYPTED_PREFIX_BASE32)) {
// backward compatible with the openssl-base32
try {
const res = await openssl.decryptBase32ToString(name, this.password);
if (res !== undefined && isVaildText(res)) {
return res;
} else {
throw Error(`cannot decrypt name=${name}`);
}
} catch (error) {
throw Error(`cannot decrypt name=${name}`);
}
} else if (name.startsWith(openssl.MAGIC_ENCRYPTED_PREFIX_BASE64URL)) {
try {
const res = await openssl.decryptBase64urlToString(
name,
this.password
);
if (res !== undefined && isVaildText(res)) {
return res;
} else {
throw Error(`cannot decrypt name=${name}`);
}
} catch (error) {
throw Error(`cannot decrypt name=${name}`);
}
} else {
throw Error(
`method=${this.method} but the name=${name}, likely mismatch`
);
}
} else if (this.method === "rclone-base64") {
const res = await this.cipherRClone!.decryptNameByCallingWorker(name);
if (res === undefined) {
throw Error(`cannot decrypt name=${name}`);
}
return res;
} else {
throw Error(`not supported decrypt method=${this.method}`);
}
}
getSizeFromOrigToEnc(x: number) {
if (this.password === "") {
return x;
}
if (this.method === "openssl-base64") {
return openssl.getSizeFromOrigToEnc(x);
} else if (this.method === "rclone-base64") {
return rclone.getSizeFromOrigToEnc(x);
} else {
throw Error(`not supported encrypt method=${this.method}`);
}
}
/**
* quick guess, no actual decryption here
* @param name
* @returns
*/
static isLikelyOpenSSLEncryptedName(name: string): boolean {
if (
name.startsWith(openssl.MAGIC_ENCRYPTED_PREFIX_BASE32) ||
name.startsWith(openssl.MAGIC_ENCRYPTED_PREFIX_BASE64URL)
) {
return true;
}
return false;
}
/**
* quick guess, no actual decryption here
* @param name
* @returns
*/
static isLikelyEncryptedName(name: string): boolean {
return Cipher.isLikelyOpenSSLEncryptedName(name);
}
/**
* quick guess, no actual decryption here, only openssl can be guessed here
* @param name
* @returns
*/
static isLikelyEncryptedNameNotMatchMethod(
name: string,
method: CipherMethodType
): boolean {
if (
Cipher.isLikelyOpenSSLEncryptedName(name) &&
method !== "openssl-base64"
) {
return true;
}
if (
!Cipher.isLikelyOpenSSLEncryptedName(name) &&
method === "openssl-base64"
) {
return true;
}
return false;
}
}

View File: src/fsAll.ts (new file, 19 lines)

@ -0,0 +1,19 @@
import { Entity } from "./baseTypes";
export abstract class FakeFs {
abstract kind: string;
abstract walk(): Promise<Entity[]>;
abstract stat(key: string): Promise<Entity>;
abstract mkdir(key: string, mtime?: number, ctime?: number): Promise<Entity>;
abstract writeFile(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity>;
abstract readFile(key: string): Promise<ArrayBuffer>;
abstract rm(key: string): Promise<void>;
abstract checkConnect(callbackFunc?: any): Promise<boolean>;
abstract getUserDisplayName(): Promise<string>;
abstract revokeAuth(): Promise<any>;
}
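
To make the contract concrete, here is a minimal hypothetical in-memory backend (illustration only, not part of this commit; the Entity fields mirror how the real backends below fill them):

import { Entity } from "./baseTypes";
import { FakeFs } from "./fsAll";

// hypothetical in-memory backend, illustrating the FakeFs contract
export class FakeFsInMemory extends FakeFs {
  kind = "inmemory";
  files = new Map<string, { content: ArrayBuffer; mtime: number; ctime: number }>();

  async walk(): Promise<Entity[]> {
    const res: Entity[] = [];
    for (const key of this.files.keys()) {
      res.push(await this.stat(key));
    }
    return res;
  }
  async stat(key: string): Promise<Entity> {
    const f = this.files.get(key);
    if (f === undefined) {
      throw Error(`${key} does not exist`);
    }
    return {
      key: key,
      keyRaw: key,
      mtimeCli: f.mtime,
      mtimeSvr: f.mtime,
      size: f.content.byteLength,
      sizeRaw: f.content.byteLength,
    };
  }
  async mkdir(key: string, mtime?: number, ctime?: number): Promise<Entity> {
    const now = Date.now();
    this.files.set(key, {
      content: new ArrayBuffer(0),
      mtime: mtime ?? now,
      ctime: ctime ?? now,
    });
    return await this.stat(key);
  }
  async writeFile(
    key: string,
    content: ArrayBuffer,
    mtime: number,
    ctime: number
  ): Promise<Entity> {
    this.files.set(key, { content, mtime, ctime });
    return await this.stat(key);
  }
  async readFile(key: string): Promise<ArrayBuffer> {
    const f = this.files.get(key);
    if (f === undefined) {
      throw Error(`${key} does not exist`);
    }
    return f.content;
  }
  async rm(key: string): Promise<void> {
    this.files.delete(key);
  }
  async checkConnect(callbackFunc?: any): Promise<boolean> {
    return true;
  }
  async getUserDisplayName(): Promise<string> {
    return "in-memory user";
  }
  async revokeAuth(): Promise<any> {
    return true;
  }
}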

View File

@ -1,25 +1,21 @@
import { FakeFs } from "./fsAll";
import { Dropbox, DropboxAuth } from "dropbox";
import type { files, DropboxResponseError, DropboxResponse } from "dropbox";
import { Vault } from "obsidian";
import * as path from "path";
import {
DropboxConfig,
Entity,
COMMAND_CALLBACK_DROPBOX,
OAUTH2_FORCE_EXPIRE_MILLISECONDS,
UploadedType,
Entity,
} from "./baseTypes";
import random from "lodash/random";
import {
bufferToArrayBuffer,
delay,
fixEntityListCasesInplace,
getFolderLevels,
getParentFolder,
hasEmojiInText,
headersToRecord,
mkdirpInVault,
} from "./misc";
import { Cipher } from "./encryptUnified";
import { random } from "lodash";
export { Dropbox } from "dropbox";
@ -34,10 +30,7 @@ export const DEFAULT_DROPBOX_CONFIG: DropboxConfig = {
credentialsShouldBeDeletedAtTime: 0,
};
export const getDropboxPath = (
fileOrFolderPath: string,
remoteBaseDir: string
) => {
const getDropboxPath = (fileOrFolderPath: string, remoteBaseDir: string) => {
let key = fileOrFolderPath;
if (fileOrFolderPath === "/" || fileOrFolderPath === "") {
// special
@ -84,20 +77,22 @@ const fromDropboxItemToEntity = (
if (x[".tag"] === "folder") {
return {
key: key,
keyRaw: key,
size: 0,
sizeRaw: 0,
etag: `${x.id}\t`,
} as Entity;
} else if (x[".tag"] === "file") {
const mtimeCli = Date.parse(x.client_modified).valueOf();
const mtimeSvr = Date.parse(x.server_modified).valueOf();
return {
key: key,
keyRaw: key,
mtimeCli: mtimeCli,
mtimeSvr: mtimeSvr,
size: x.size,
sizeRaw: x.size,
hash: x.content_hash,
etag: `${x.id}\t${x.content_hash}`,
} as Entity;
} else {
// x[".tag"] === "deleted"
@ -105,6 +100,132 @@ const fromDropboxItemToEntity = (
}
};
/**
* https://github.com/remotely-save/remotely-save/issues/567
* https://www.dropboxforum.com/t5/Dropbox-API-Support-Feedback/Case-Sensitivity-in-API-2/td-p/191279
* @param entities
*/
export const fixEntityListCasesInplace = (entities: { key?: string }[]) => {
for (const iterator of entities) {
if (iterator.key === undefined) {
throw Error(`dropbox list entries should all have a key, but met undefined`);
}
}
entities.sort((a, b) => a.key!.length - b.key!.length);
// console.log(JSON.stringify(entities,null,2));
const caseMapping: Record<string, string> = { "": "" };
for (const e of entities) {
// console.log(`looking for: ${JSON.stringify(e, null, 2)}`);
let parentFolder = getParentFolder(e.key!);
if (parentFolder === "/") {
parentFolder = "";
}
const parentFolderLower = parentFolder.toLocaleLowerCase();
const segs = e.key!.split("/");
if (e.key!.endsWith("/")) {
// folder
if (caseMapping.hasOwnProperty(parentFolderLower)) {
const newKey = `${caseMapping[parentFolderLower]}${segs
.slice(-2)
.join("/")}`;
caseMapping[newKey.toLocaleLowerCase()] = newKey;
e.key = newKey;
// console.log(JSON.stringify(caseMapping,null,2));
continue;
} else {
throw Error(`${parentFolder} doesn't have a case record??`);
}
} else {
// file
if (caseMapping.hasOwnProperty(parentFolderLower)) {
const newKey = `${caseMapping[parentFolderLower]}${segs
.slice(-1)
.join("/")}`;
e.key = newKey;
continue;
} else {
throw Error(`${parentFolder} doesn't have a case record??`);
}
}
}
return entities;
};
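
A worked example of the intended normalization (hypothetical input). Entries are sorted shortest-first so each parent folder is processed before its children, and whichever casing is recorded first for a folder wins for everything beneath it:

// hypothetical: Dropbox reported the same folder with inconsistent casing
const entities = [
  { key: "Sub/deep/" },
  { key: "sub/" },
  { key: "SUB/deep/x.md" },
];
fixEntityListCasesInplace(entities);
// after the internal sort, "sub/" is seen first and becomes canonical:
// keys are now "sub/", "sub/deep/", "sub/deep/x.md"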
////////////////////////////////////////////////////////////////////////////////
// Other usual common methods
////////////////////////////////////////////////////////////////////////////////
interface ErrSubType {
error: {
retry_after: number;
};
}
async function retryReq<T>(
reqFunc: () => Promise<DropboxResponse<T>>,
extraHint: string = ""
): Promise<DropboxResponse<T> | undefined> {
const waitSeconds = [1, 2, 4, 8]; // hard code exponential backoff
for (let idx = 0; idx < waitSeconds.length; ++idx) {
try {
if (idx !== 0) {
console.warn(
`${extraHint === "" ? "" : extraHint + ": "}The ${
idx + 1
}-th try starts at time ${Date.now()}`
);
}
return await reqFunc();
} catch (e: unknown) {
const err = e as DropboxResponseError<ErrSubType>;
if (err.status === undefined) {
// then the err is not DropboxResponseError
throw err;
}
if (err.status !== 429) {
// then the err is not "too many requests", give up
throw err;
}
if (idx === waitSeconds.length - 1) {
// the last retry also failed, give up
throw new Error(
`${
extraHint === "" ? "" : extraHint + ": "
}"429 too many requests", after retrying for ${
idx + 1
} times still failed.`
);
}
const headers = headersToRecord(err.headers);
const svrSec =
err.error.error.retry_after ||
parseInt(headers["retry-after"] || "1") ||
1;
const fallbackSec = waitSeconds[idx];
const secMin = Math.max(svrSec, fallbackSec);
const secMax = Math.max(secMin * 1.8, 2);
console.warn(
`${
extraHint === "" ? "" : extraHint + ": "
}We have "429 too many requests" error of ${
idx + 1
}-th try, at time ${Date.now()}, and wait for ${secMin} ~ ${secMax} seconds to retry. Original info: ${JSON.stringify(
err.error,
null,
2
)}`
);
await delay(random(secMin * 1000, secMax * 1000));
}
}
}
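
A hypothetical call site: wrap any Dropbox SDK request so that 429 responses back off and retry while other errors propagate immediately. For example, on the second try (idx = 1) with a server-sent retry_after of 3, the wait is sampled uniformly between max(3, 2) = 3 s and max(3 * 1.8, 2) = 5.4 s:

// hypothetical usage; `dropbox` is an initialized Dropbox SDK client
const rsp = await retryReq(
  () => dropbox.filesGetMetadata({ path: "/vault/notes.md" }),
  "stat /vault/notes.md" // hint prepended to the warning logs
);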
////////////////////////////////////////////////////////////////////////////////
// Dropbox authorization using PKCE
// see https://dropbox.tech/developers/pkce--what-and-why-
@ -228,94 +349,33 @@ export const setConfigBySuccessfullAuthInplace = async (
};
////////////////////////////////////////////////////////////////////////////////
// Other usual common methods
// real exported interface
////////////////////////////////////////////////////////////////////////////////
interface ErrSubType {
error: {
retry_after: number;
};
}
async function retryReq<T>(
reqFunc: () => Promise<DropboxResponse<T>>,
extraHint: string = ""
): Promise<DropboxResponse<T> | undefined> {
const waitSeconds = [1, 2, 4, 8]; // hard code exponential backoff
for (let idx = 0; idx < waitSeconds.length; ++idx) {
try {
if (idx !== 0) {
console.warn(
`${extraHint === "" ? "" : extraHint + ": "}The ${
idx + 1
}-th try starts at time ${Date.now()}`
);
}
return await reqFunc();
} catch (e: unknown) {
const err = e as DropboxResponseError<ErrSubType>;
if (err.status === undefined) {
// then the err is not DropboxResponseError
throw err;
}
if (err.status !== 429) {
// then the err is not "too many requests", give up
throw err;
}
if (idx === waitSeconds.length - 1) {
// the last retry also failed, give up
throw new Error(
`${
extraHint === "" ? "" : extraHint + ": "
}"429 too many requests", after retrying for ${
idx + 1
} times still failed.`
);
}
const headers = headersToRecord(err.headers);
const svrSec =
err.error.error.retry_after ||
parseInt(headers["retry-after"] || "1") ||
1;
const fallbackSec = waitSeconds[idx];
const secMin = Math.max(svrSec, fallbackSec);
const secMax = Math.max(secMin * 1.8, 2);
console.warn(
`${
extraHint === "" ? "" : extraHint + ": "
}We have "429 too many requests" error of ${
idx + 1
}-th try, at time ${Date.now()}, and wait for ${secMin} ~ ${secMax} seconds to retry. Original info: ${JSON.stringify(
err.error,
null,
2
)}`
);
await delay(random(secMin * 1000, secMax * 1000));
}
}
}
export class WrappedDropboxClient {
export class FakeFsDropbox extends FakeFs {
kind: "dropbox";
dropboxConfig: DropboxConfig;
remoteBaseDir: string;
saveUpdatedConfigFunc: () => Promise<any>;
dropbox!: Dropbox;
vaultFolderExists: boolean;
foldersCreatedBefore: Set<string>;
constructor(
dropboxConfig: DropboxConfig,
remoteBaseDir: string,
vaultName: string,
saveUpdatedConfigFunc: () => Promise<any>
) {
super();
this.kind = "dropbox";
this.dropboxConfig = dropboxConfig;
this.remoteBaseDir = remoteBaseDir;
this.remoteBaseDir = this.dropboxConfig.remoteBaseDir || vaultName || "";
this.saveUpdatedConfigFunc = saveUpdatedConfigFunc;
this.vaultFolderExists = false;
this.foldersCreatedBefore = new Set();
}
init = async () => {
async _init() {
// check token
if (
this.dropboxConfig.accessToken === "" ||
@ -388,389 +448,292 @@ export class WrappedDropboxClient {
}
}
return this.dropbox;
};
}
/**
* @param dropboxConfig
* @returns
*/
export const getDropboxClient = (
dropboxConfig: DropboxConfig,
remoteBaseDir: string,
saveUpdatedConfigFunc: () => Promise<any>
) => {
return new WrappedDropboxClient(
dropboxConfig,
remoteBaseDir,
saveUpdatedConfigFunc
);
};
export const getRemoteMeta = async (
client: WrappedDropboxClient,
remotePath: string
) => {
await client.init();
// if (remotePath === "" || remotePath === "/") {
// // filesGetMetadata doesn't support root folder
// // we instead try to list files
// // if no error occurs, we ensemble a fake result.
// const rsp = await retryReq(() =>
// client.dropbox.filesListFolder({
// path: `/${client.remoteBaseDir}`,
// recursive: false, // don't need to recursive here
// })
// );
// if (rsp.status !== 200) {
// throw Error(JSON.stringify(rsp));
// }
// return {
// key: remotePath,
// lastModified: undefined,
// size: 0,
// remoteType: "dropbox",
// etag: undefined,
// } as Entity;
// }
const rsp = await retryReq(() =>
client.dropbox.filesGetMetadata({
path: remotePath,
})
);
if (rsp === undefined) {
throw Error("dropbox.filesGetMetadata undefinded");
return this;
}
if (rsp.status !== 200) {
throw Error(JSON.stringify(rsp));
async walk(): Promise<Entity[]> {
await this._init();
let res = await this.dropbox.filesListFolder({
path: `/${this.remoteBaseDir}`,
recursive: true,
include_deleted: false,
limit: 1000,
});
if (res.status !== 200) {
throw Error(JSON.stringify(res));
}
// console.info(res);
const contents = res.result.entries;
const unifiedContents = contents
.filter((x) => x[".tag"] !== "deleted")
.filter((x) => x.path_display !== `/${this.remoteBaseDir}`)
.map((x) => fromDropboxItemToEntity(x, this.remoteBaseDir));
while (res.result.has_more) {
res = await this.dropbox.filesListFolderContinue({
cursor: res.result.cursor,
});
if (res.status !== 200) {
throw Error(JSON.stringify(res));
}
const contents2 = res.result.entries;
const unifiedContents2 = contents2
.filter((x) => x[".tag"] !== "deleted")
.filter((x) => x.path_display !== `/${this.remoteBaseDir}`)
.map((x) => fromDropboxItemToEntity(x, this.remoteBaseDir));
unifiedContents.push(...unifiedContents2);
}
fixEntityListCasesInplace(unifiedContents);
return unifiedContents;
}
return fromDropboxItemToEntity(rsp.result, client.remoteBaseDir);
};
export const uploadToRemote = async (
client: WrappedDropboxClient,
fileOrFolderPath: string,
vault: Vault | undefined,
isRecursively: boolean,
cipher: Cipher,
remoteEncryptedKey: string = "",
foldersCreatedBefore: Set<string> | undefined = undefined,
uploadRaw: boolean = false,
rawContent: string | ArrayBuffer = "",
rawContentMTime: number = 0,
rawContentCTime: number = 0
): Promise<UploadedType> => {
await client.init();
async stat(key: string): Promise<Entity> {
await this._init();
return await this._statFromRoot(getDropboxPath(key, this.remoteBaseDir));
}
let uploadFile = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
if (remoteEncryptedKey === undefined || remoteEncryptedKey === "") {
throw Error(
`uploadToRemote(dropbox) you have password but remoteEncryptedKey is empty!`
async _statFromRoot(key: string): Promise<Entity> {
// if (key === "" || key === "/") {
// // filesGetMetadata doesn't support root folder
// // we instead try to list files
// // if no error occurs, we ensemble a fake result.
// const rsp = await retryReq(() =>
// client.dropbox.filesListFolder({
// path: `/${client.key}`,
// recursive: false, // don't need to recursive here
// })
// );
// if (rsp.status !== 200) {
// throw Error(JSON.stringify(rsp));
// }
// return {
// key: remotePath,
// lastModified: undefined,
// size: 0,
// remoteType: "dropbox",
// etag: undefined,
// } as Entity;
// }
const rsp = await retryReq(() =>
this.dropbox.filesGetMetadata({
path: key,
})
);
if (rsp === undefined) {
throw Error("dropbox.filesGetMetadata undefinded");
}
if (rsp.status !== 200) {
throw Error(JSON.stringify(rsp));
}
return fromDropboxItemToEntity(rsp.result, this.remoteBaseDir);
}
async mkdir(key: string, mtime?: number, ctime?: number): Promise<Entity> {
if (!key.endsWith("/")) {
throw Error(`you should not call mkdir on ${key}`);
}
await this._init();
const uploadFile = getDropboxPath(key, this.remoteBaseDir);
return await this._mkdirFromRoot(uploadFile, mtime, ctime);
}
async _mkdirFromRoot(
key: string,
mtime?: number,
ctime?: number
): Promise<Entity> {
if (hasEmojiInText(key)) {
throw new Error(
`${key}: Error: Dropbox does not support emoji in file / folder names.`
);
}
uploadFile = remoteEncryptedKey;
if (this.foldersCreatedBefore?.has(key)) {
// created, pass
} else {
try {
await retryReq(
() =>
this.dropbox.filesCreateFolderV2({
path: key,
}),
key // just a hint
);
this.foldersCreatedBefore?.add(key);
} catch (e: unknown) {
const err = e as DropboxResponseError<files.CreateFolderError>;
if (err.status === undefined) {
throw err;
}
if (err.status === 409) {
// pass
this.foldersCreatedBefore?.add(key);
} else {
throw err;
}
}
}
return await this._statFromRoot(key);
}
uploadFile = getDropboxPath(uploadFile, client.remoteBaseDir);
if (hasEmojiInText(uploadFile)) {
throw new Error(
`${uploadFile}: Error: Dropbox does not support emoji in file / folder names.`
async writeFile(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity> {
if (key.endsWith("/")) {
throw Error(`you should not call writeFile on ${key}`);
}
await this._init();
const uploadFile = getDropboxPath(key, this.remoteBaseDir);
return await this._writeFileFromRoot(
uploadFile,
content,
mtime,
ctime,
key
);
}
let mtime = 0;
let ctime = 0;
const s = await vault?.adapter?.stat(fileOrFolderPath);
if (s !== undefined && s !== null) {
mtime = Math.floor(s.mtime / 1000.0) * 1000;
ctime = Math.floor(s.ctime / 1000.0) * 1000;
}
const mtimeStr = new Date(mtime).toISOString().replace(/\.\d{3}Z$/, "Z");
const isFolder = fileOrFolderPath.endsWith("/");
if (isFolder && isRecursively) {
throw Error("upload function doesn't implement recursive function yet!");
} else if (isFolder && !isRecursively) {
if (uploadRaw) {
throw Error(`you specify uploadRaw, but you also provide a folder key!`);
}
// folder
if (cipher.isPasswordEmpty() || cipher.isFolderAware()) {
// if not encrypted, || encrypted isFolderAware, mkdir a remote folder
if (foldersCreatedBefore?.has(uploadFile)) {
// created, pass
} else {
try {
await retryReq(
() =>
client.dropbox.filesCreateFolderV2({
path: uploadFile,
}),
fileOrFolderPath
);
foldersCreatedBefore?.add(uploadFile);
} catch (e: unknown) {
const err = e as DropboxResponseError<files.CreateFolderError>;
if (err.status === undefined) {
throw err;
}
if (err.status === 409) {
// pass
foldersCreatedBefore?.add(uploadFile);
} else {
throw err;
}
}
}
const res = await getRemoteMeta(client, uploadFile);
return {
entity: res,
mtimeCli: mtime,
};
} else {
// if encrypted && !isFolderAware(),
// upload a fake file with the encrypted file name
await retryReq(
() =>
client.dropbox.filesUpload({
path: uploadFile,
contents: "",
client_modified: mtimeStr,
}),
fileOrFolderPath
async _writeFileFromRoot(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number,
origKey: string
): Promise<Entity> {
if (hasEmojiInText(origKey)) {
throw new Error(
`${origKey}: Error: Dropbox does not support emoji in file / folder names.`
);
return {
entity: await getRemoteMeta(client, uploadFile),
mtimeCli: mtime,
};
}
} else {
// file
// we ignore isRecursively parameter here
let localContent = undefined;
if (uploadRaw) {
if (typeof rawContent === "string") {
localContent = new TextEncoder().encode(rawContent).buffer;
} else {
localContent = rawContent;
}
} else {
if (vault === undefined) {
throw new Error(
`the vault variable is not passed but we want to read ${fileOrFolderPath} for Dropbox`
);
}
localContent = await vault.adapter.readBinary(fileOrFolderPath);
}
let remoteContent = localContent;
if (!cipher.isPasswordEmpty()) {
remoteContent = await cipher.encryptContent(localContent);
}
const mtimeFixed = Math.floor(mtime / 1000.0) * 1000;
const ctimeFixed = Math.floor(ctime / 1000.0) * 1000;
const mtimeStr = new Date(mtimeFixed)
.toISOString()
.replace(/\.\d{3}Z$/, "Z");
// in dropbox, we don't need to create folders before uploading! cool!
// TODO: filesUploadSession for larger files (>=150 MB)
await retryReq(
() =>
client.dropbox.filesUpload({
path: uploadFile,
contents: remoteContent,
this.dropbox.filesUpload({
path: key,
contents: content,
mode: {
".tag": "overwrite",
},
client_modified: mtimeStr,
}),
fileOrFolderPath
origKey // hint
);
// we want to mark that parent folders are created
if (foldersCreatedBefore !== undefined) {
const dirs = getFolderLevels(uploadFile).map((x) =>
getDropboxPath(x, client.remoteBaseDir)
if (this.foldersCreatedBefore !== undefined) {
const dirs = getFolderLevels(origKey).map((x) =>
getDropboxPath(x, this.remoteBaseDir)
);
for (const dir of dirs) {
foldersCreatedBefore?.add(dir);
this.foldersCreatedBefore?.add(dir);
}
}
return {
entity: await getRemoteMeta(client, uploadFile),
mtimeCli: mtime,
};
return await this._statFromRoot(key);
}
};
export const listAllFromRemote = async (client: WrappedDropboxClient) => {
await client.init();
let res = await client.dropbox.filesListFolder({
path: `/${client.remoteBaseDir}`,
recursive: true,
include_deleted: false,
limit: 1000,
});
if (res.status !== 200) {
throw Error(JSON.stringify(res));
}
// console.info(res);
const contents = res.result.entries;
const unifiedContents = contents
.filter((x) => x[".tag"] !== "deleted")
.filter((x) => x.path_display !== `/${client.remoteBaseDir}`)
.map((x) => fromDropboxItemToEntity(x, client.remoteBaseDir));
while (res.result.has_more) {
res = await client.dropbox.filesListFolderContinue({
cursor: res.result.cursor,
});
if (res.status !== 200) {
throw Error(JSON.stringify(res));
async readFile(key: string): Promise<ArrayBuffer> {
await this._init();
if (key.endsWith("/")) {
throw new Error(`you should not call readFile on folder ${key}`);
}
const contents2 = res.result.entries;
const unifiedContents2 = contents2
.filter((x) => x[".tag"] !== "deleted")
.filter((x) => x.path_display !== `/${client.remoteBaseDir}`)
.map((x) => fromDropboxItemToEntity(x, client.remoteBaseDir));
unifiedContents.push(...unifiedContents2);
const downloadFile = getDropboxPath(key, this.remoteBaseDir);
return await this._readFileFromRoot(downloadFile);
}
fixEntityListCasesInplace(unifiedContents);
return unifiedContents;
};
const downloadFromRemoteRaw = async (
client: WrappedDropboxClient,
remotePath: string
) => {
await client.init();
const rsp = await retryReq(
() =>
client.dropbox.filesDownload({
path: remotePath,
}),
`downloadFromRemoteRaw=${remotePath}`
);
if (rsp === undefined) {
throw Error(`unknown rsp from dropbox download: ${rsp}`);
}
if ((rsp.result as any).fileBlob !== undefined) {
// we get a Blob
const content = (rsp.result as any).fileBlob as Blob;
return await content.arrayBuffer();
} else if ((rsp.result as any).fileBinary !== undefined) {
// we get a Buffer
const content = (rsp.result as any).fileBinary as Buffer;
return bufferToArrayBuffer(content);
} else {
throw Error(`unknown rsp from dropbox download: ${rsp}`);
}
};
export const downloadFromRemote = async (
client: WrappedDropboxClient,
fileOrFolderPath: string,
vault: Vault,
mtime: number,
cipher: Cipher,
remoteEncryptedKey: string = "",
skipSaving: boolean = false
) => {
await client.init();
const isFolder = fileOrFolderPath.endsWith("/");
if (!skipSaving) {
await mkdirpInVault(fileOrFolderPath, vault);
}
// the file is always local file
// we need to encrypt it
if (isFolder) {
// mkdirp locally is enough
// do nothing here
return new ArrayBuffer(0);
} else {
let downloadFile = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
downloadFile = remoteEncryptedKey;
}
downloadFile = getDropboxPath(downloadFile, client.remoteBaseDir);
const remoteContent = await downloadFromRemoteRaw(client, downloadFile);
let localContent = remoteContent;
if (!cipher.isPasswordEmpty()) {
localContent = await cipher.decryptContent(remoteContent);
}
if (!skipSaving) {
await vault.adapter.writeBinary(fileOrFolderPath, localContent, {
mtime: mtime,
});
}
return localContent;
}
};
export const deleteFromRemote = async (
client: WrappedDropboxClient,
fileOrFolderPath: string,
cipher: Cipher,
remoteEncryptedKey: string = ""
) => {
if (fileOrFolderPath === "/") {
return;
}
let remoteFileName = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
remoteFileName = remoteEncryptedKey;
}
remoteFileName = getDropboxPath(remoteFileName, client.remoteBaseDir);
await client.init();
try {
await retryReq(
async _readFileFromRoot(key: string): Promise<ArrayBuffer> {
const rsp = await retryReq(
() =>
client.dropbox.filesDeleteV2({
path: remoteFileName,
this.dropbox.filesDownload({
path: key,
}),
fileOrFolderPath
`downloadFromRemoteRaw=${key}`
);
} catch (err) {
console.error("some error while deleting");
console.error(err);
if (rsp === undefined) {
throw Error(`unknown rsp from dropbox download: ${rsp}`);
}
if ((rsp.result as any).fileBlob !== undefined) {
// we get a Blob
const content = (rsp.result as any).fileBlob as Blob;
return await content.arrayBuffer();
} else if ((rsp.result as any).fileBinary !== undefined) {
// we get a Buffer
const content = (rsp.result as any).fileBinary as Buffer;
return bufferToArrayBuffer(content);
} else {
throw Error(`unknown rsp from dropbox download: ${rsp}`);
}
}
};
export const checkConnectivity = async (
client: WrappedDropboxClient,
callbackFunc?: any
) => {
try {
await client.init();
const results = await getRemoteMeta(client, `/${client.remoteBaseDir}`);
if (results === undefined) {
async rm(key: string): Promise<void> {
if (key === "/") {
return;
}
const remoteFileName = getDropboxPath(key, this.remoteBaseDir);
await this._init();
try {
await retryReq(
() =>
this.dropbox.filesDeleteV2({
path: remoteFileName,
}),
key // just a hint here
);
} catch (err) {
console.error("some error while deleting");
console.error(err);
}
}
async checkConnect(callbackFunc?: any): Promise<boolean> {
try {
await this._init();
const results = await this._statFromRoot(`/${this.remoteBaseDir}`);
if (results === undefined) {
return false;
}
return true;
} catch (err) {
console.debug(err);
callbackFunc?.(err);
return false;
}
return true;
} catch (err) {
console.debug(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
}
return false;
}
};
export const getUserDisplayName = async (client: WrappedDropboxClient) => {
await client.init();
const acct = await client.dropbox.usersGetCurrentAccount();
return acct.result.name.display_name;
};
async getUserDisplayName() {
await this._init();
const acct = await this.dropbox.usersGetCurrentAccount();
return acct.result.name.display_name;
}
export const revokeAuth = async (client: WrappedDropboxClient) => {
await client.init();
await client.dropbox.authTokenRevoke();
};
async revokeAuth() {
try {
await this._init();
await this.dropbox.authTokenRevoke();
return true;
} catch (e) {
return false;
}
}
}
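
A hypothetical end-to-end sketch of the new shape: construct the backend once, then use only the uniform FakeFs methods. _init() runs lazily inside each operation, so there is no separate login step:

const fs = new FakeFsDropbox(
  settings.dropbox, // DropboxConfig
  app.vault.getName(), // fallback remoteBaseDir when the config has none
  async () => await plugin.saveSettings() // hypothetical persistence callback
);
if (await fs.checkConnect((err: any) => console.error(err))) {
  const remoteEntities = await fs.walk();
  console.log(`remote has ${remoteEntities.length} entries`);
}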

View File: src/fsEncrypt.ts (new file, 553 lines)

@ -0,0 +1,553 @@
import { CipherMethodType, Entity } from "./baseTypes";
import * as openssl from "./encryptOpenSSL";
import * as rclone from "./encryptRClone";
import { isVaildText } from "./misc";
import { FakeFs } from "./fsAll";
import cloneDeep from "lodash/cloneDeep";
/**
* quick guess, no actual decryption here
* @param name
* @returns
*/
function isLikelyOpenSSLEncryptedName(name: string): boolean {
if (
name.startsWith(openssl.MAGIC_ENCRYPTED_PREFIX_BASE32) ||
name.startsWith(openssl.MAGIC_ENCRYPTED_PREFIX_BASE64URL)
) {
return true;
}
return false;
}
/**
* quick guess, no actual decryption here
* @param name
* @returns
*/
function isLikelyEncryptedName(name: string): boolean {
return isLikelyOpenSSLEncryptedName(name);
}
/**
* quick guess, no actual decryption here, only openssl can be guessed here
* @param name
* @returns
*/
function isLikelyEncryptedNameNotMatchMethod(
name: string,
method: CipherMethodType
): boolean {
if (isLikelyOpenSSLEncryptedName(name) && method !== "openssl-base64") {
return true;
}
if (!isLikelyOpenSSLEncryptedName(name) && method === "openssl-base64") {
return true;
}
return false;
}
export interface PasswordCheckType {
ok: boolean;
reason:
| "empty_remote"
| "unknown_encryption_method"
| "remote_encrypted_local_no_password"
| "password_matched"
| "password_or_method_not_matched_or_remote_not_encrypted"
| "likely_no_password_both_sides"
| "encryption_method_not_matched";
}
/**
* Useful if isPasswordEmpty()
*/
function copyEntityAndCopyKeyEncSizeEnc(entity: Entity) {
const res = cloneDeep(entity);
res["keyEnc"] = res["keyRaw"];
res["sizeEnc"] = res["sizeRaw"];
return res;
}
export class FakeFsEncrypt extends FakeFs {
innerFs: FakeFs;
readonly password: string;
readonly method: CipherMethodType;
cipherRClone?: rclone.CipherRclone;
cacheMapOrigToEnc: Record<string, string>;
hasCacheMap: boolean;
kind: string;
innerWalkResultCache?: Entity[];
innerWalkResultCacheTime?: number;
constructor(innerFs: FakeFs, password: string, method: CipherMethodType) {
super();
this.innerFs = innerFs;
this.password = password ?? "";
this.method = method;
this.cacheMapOrigToEnc = {};
this.hasCacheMap = false;
this.kind = `encrypt(${this.innerFs.kind},${method})`;
if (method === "rclone-base64") {
this.cipherRClone = new rclone.CipherRclone(password, 5);
}
}
isPasswordEmpty() {
return this.password === "";
}
isFolderAware() {
if (this.method === "openssl-base64") {
return false;
}
if (this.method === "rclone-base64") {
return true;
}
throw Error(`no idea about isFolderAware for method=${this.method}`);
}
/**
* we want a little caching here.
*/
async _getInnerWalkResult(): Promise<Entity[]> {
let innerWalkResult: Entity[] | undefined = undefined;
if (
this.innerWalkResultCacheTime !== undefined &&
this.innerWalkResultCacheTime >= Date.now() - 1000
) {
innerWalkResult = this.innerWalkResultCache!;
} else {
innerWalkResult = await this.innerFs.walk();
this.innerWalkResultCache = innerWalkResult;
this.innerWalkResultCacheTime = Date.now();
}
return innerWalkResult;
}
async isPasswordOk(): Promise<PasswordCheckType> {
const innerWalkResult = await this._getInnerWalkResult();
if (innerWalkResult === undefined || innerWalkResult.length === 0) {
// remote empty
return {
ok: true,
reason: "empty_remote",
};
}
const sanityCheckKey = innerWalkResult[0].keyRaw;
if (this.isPasswordEmpty()) {
// TODO: no way to distinguish remote rclone encrypted
// if local has no password??
if (isLikelyEncryptedName(sanityCheckKey)) {
return {
ok: false,
reason: "remote_encrypted_local_no_password",
};
} else {
return {
ok: true,
reason: "likely_no_password_both_sides",
};
}
} else {
if (this.method === "unknown") {
return {
ok: false,
reason: "unknown_encryption_method",
};
}
if (isLikelyEncryptedNameNotMatchMethod(sanityCheckKey, this.method)) {
return {
ok: false,
reason: "encryption_method_not_matched",
};
}
try {
const k = await this._decryptName(sanityCheckKey);
if (k === undefined) {
throw Error(`decryption failed`);
}
return {
ok: true,
reason: "password_matched",
};
} catch (error) {
return {
ok: false,
reason: "password_or_method_not_matched_or_remote_not_encrypted",
};
}
}
}
async walk(): Promise<Entity[]> {
const innerWalkResult = await this._getInnerWalkResult();
const res: Entity[] = [];
if (this.isPasswordEmpty()) {
for (const innerEntity of innerWalkResult) {
res.push(copyEntityAndCopyKeyEncSizeEnc(innerEntity));
this.cacheMapOrigToEnc[innerEntity.key!] = innerEntity.key!;
}
this.hasCacheMap = true;
return res;
} else {
for (const innerEntity of innerWalkResult) {
const key = await this._decryptName(innerEntity.keyRaw);
const size = key.endsWith("/") ? 0 : undefined;
res.push({
key: key,
keyRaw: innerEntity.keyRaw,
keyEnc: innerEntity.key!,
mtimeCli: innerEntity.mtimeCli,
mtimeSvr: innerEntity.mtimeSvr,
size: size,
sizeEnc: innerEntity.size!,
sizeRaw: innerEntity.sizeRaw,
hash: undefined,
});
this.cacheMapOrigToEnc[key] = innerEntity.keyRaw;
}
this.hasCacheMap = true;
return res;
}
}
async stat(key: string): Promise<Entity> {
if (!this.hasCacheMap) {
throw new Error("You have to build the cacheMap firstly for stat");
}
const keyEnc = this.cacheMapOrigToEnc[key];
if (keyEnc === undefined) {
throw new Error(`no encrypted key ${key} before!`);
}
const innerEntity = await this.innerFs.stat(keyEnc);
if (this.isPasswordEmpty()) {
return copyEntityAndCopyKeyEncSizeEnc(innerEntity);
} else {
return {
key: key,
keyRaw: innerEntity.keyRaw,
keyEnc: innerEntity.key!,
mtimeCli: innerEntity.mtimeCli,
mtimeSvr: innerEntity.mtimeSvr,
size: undefined,
sizeEnc: innerEntity.size!,
sizeRaw: innerEntity.sizeRaw,
hash: undefined,
};
}
}
async mkdir(key: string, mtime?: number, ctime?: number): Promise<Entity> {
if (!this.hasCacheMap) {
throw new Error("You have to build the cacheMap firstly for mkdir");
}
if (!key.endsWith("/")) {
throw new Error(`should not call mkdir on ${key}`);
}
let keyEnc = this.cacheMapOrigToEnc[key];
if (keyEnc === undefined) {
if (this.isPasswordEmpty()) {
keyEnc = key;
} else {
keyEnc = await this._encryptName(key);
}
this.cacheMapOrigToEnc[key] = keyEnc;
}
if (this.isPasswordEmpty() || this.isFolderAware()) {
const innerEntity = await this.innerFs.mkdir(keyEnc, mtime, ctime);
return copyEntityAndCopyKeyEncSizeEnc(innerEntity);
} else {
const now = Date.now();
const innerEntity = await this.innerFs.writeFile(
keyEnc,
new ArrayBuffer(0),
mtime ?? now,
ctime ?? now
);
return {
key: key,
keyRaw: innerEntity.keyRaw,
keyEnc: innerEntity.key!,
mtimeCli: innerEntity.mtimeCli,
mtimeSvr: innerEntity.mtimeSvr,
size: 0,
sizeEnc: innerEntity.size!,
sizeRaw: innerEntity.sizeRaw,
hash: undefined,
};
}
}
async writeFile(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity> {
if (!this.hasCacheMap) {
throw new Error("You have to build the cacheMap firstly for readFile");
}
let keyEnc = this.cacheMapOrigToEnc[key];
if (keyEnc === undefined) {
if (this.isPasswordEmpty()) {
keyEnc = key;
} else {
keyEnc = await this._encryptName(key);
}
this.cacheMapOrigToEnc[key] = keyEnc;
}
if (this.isPasswordEmpty()) {
const innerEntity = await this.innerFs.writeFile(
keyEnc,
content,
mtime,
ctime
);
return copyEntityAndCopyKeyEncSizeEnc(innerEntity);
} else {
const contentEnc = await this._encryptContent(content);
const innerEntity = await this.innerFs.writeFile(
keyEnc,
contentEnc,
mtime,
ctime
);
return {
key: key,
keyRaw: innerEntity.keyRaw,
keyEnc: innerEntity.key!,
mtimeCli: innerEntity.mtimeCli,
mtimeSvr: innerEntity.mtimeSvr,
size: undefined,
sizeEnc: innerEntity.size!,
sizeRaw: innerEntity.sizeRaw,
hash: undefined,
};
}
}
async readFile(key: string): Promise<ArrayBuffer> {
if (!this.hasCacheMap) {
throw new Error("You have to build the cacheMap firstly for readFile");
}
const keyEnc = this.cacheMapOrigToEnc[key];
if (keyEnc === undefined) {
throw new Error(`no encrypted key ${key} before! cannot readFile`);
}
const contentEnc = await this.innerFs.readFile(keyEnc);
if (this.isPasswordEmpty()) {
return contentEnc;
} else {
const res = await this._decryptContent(contentEnc);
return res;
}
}
async rm(key: string): Promise<void> {
if (!this.hasCacheMap) {
throw new Error("You have to build the cacheMap firstly for rm");
}
const keyEnc = this.cacheMapOrigToEnc[key];
if (keyEnc === undefined) {
throw new Error(`no encrypted key ${key} before! cannot rm`);
}
return await this.innerFs.rm(keyEnc);
}
async checkConnect(callbackFunc?: any): Promise<boolean> {
return await this.innerFs.checkConnect(callbackFunc);
}
async closeResources() {
if (this.method === "rclone-base64" && this.cipherRClone !== undefined) {
this.cipherRClone.closeResources();
}
}
async encryptEntity(input: Entity): Promise<Entity> {
if (input.key === undefined) {
// input.key should always have value
throw Error(`input ${input.keyRaw} is abnormal without key`);
}
if (this.isPasswordEmpty()) {
return copyEntityAndCopyKeyEncSizeEnc(input);
}
// below is for having password
const local = cloneDeep(input);
if (local.sizeEnc === undefined && local.size !== undefined) {
// it's not filled yet, we fill it
// local.size is possibly undefined if it's "prevSync" Entity
// but local.key should always have value
local.sizeEnc = this._getSizeFromOrigToEnc(local.size);
}
if (local.keyEnc === undefined || local.keyEnc === "") {
let keyEnc = this.cacheMapOrigToEnc[input.key];
if (keyEnc !== undefined && keyEnc !== "" && keyEnc !== local.key) {
// we can reuse remote encrypted key if any
local.keyEnc = keyEnc;
} else {
// we assign a new encrypted key because of no remote
keyEnc = await this._encryptName(input.key);
local.keyEnc = keyEnc;
// remember to add back to cache!
this.cacheMapOrigToEnc[input.key] = keyEnc;
}
}
return local;
}
async _encryptContent(content: ArrayBuffer) {
// console.debug("start encryptContent");
if (this.password === "") {
return content;
}
if (this.method === "openssl-base64") {
const res = await openssl.encryptArrayBuffer(content, this.password);
if (res === undefined) {
throw Error(`cannot encrypt content`);
}
return res;
} else if (this.method === "rclone-base64") {
const res =
await this.cipherRClone!.encryptContentByCallingWorker(content);
if (res === undefined) {
throw Error(`cannot encrypt content`);
}
return res;
} else {
throw Error(`not supported encrypt method=${this.method}`);
}
}
async _decryptContent(content: ArrayBuffer) {
// console.debug("start decryptContent");
if (this.password === "") {
return content;
}
if (this.method === "openssl-base64") {
const res = await openssl.decryptArrayBuffer(content, this.password);
if (res === undefined) {
throw Error(`cannot decrypt content`);
}
return res;
} else if (this.method === "rclone-base64") {
const res =
await this.cipherRClone!.decryptContentByCallingWorker(content);
if (res === undefined) {
throw Error(`cannot decrypt content`);
}
return res;
} else {
throw Error(`not supported decrypt method=${this.method}`);
}
}
async _encryptName(name: string) {
// console.debug("start encryptName");
if (this.password === "") {
return name;
}
if (this.method === "openssl-base64") {
const res = await openssl.encryptStringToBase64url(name, this.password);
if (res === undefined) {
throw Error(`cannot encrypt name=${name}`);
}
return res;
} else if (this.method === "rclone-base64") {
const res = await this.cipherRClone!.encryptNameByCallingWorker(name);
if (res === undefined) {
throw Error(`cannot encrypt name=${name}`);
}
return res;
} else {
throw Error(`not supported encrypt method=${this.method}`);
}
}
async _decryptName(name: string): Promise<string> {
// console.debug("start decryptName");
if (this.password === "") {
return name;
}
if (this.method === "openssl-base64") {
if (name.startsWith(openssl.MAGIC_ENCRYPTED_PREFIX_BASE32)) {
// backward compatible with the openssl-base32
try {
const res = await openssl.decryptBase32ToString(name, this.password);
if (res !== undefined && isVaildText(res)) {
return res;
} else {
throw Error(`cannot decrypt name=${name}`);
}
} catch (error) {
throw Error(`cannot decrypt name=${name}`);
}
} else if (name.startsWith(openssl.MAGIC_ENCRYPTED_PREFIX_BASE64URL)) {
try {
const res = await openssl.decryptBase64urlToString(
name,
this.password
);
if (res !== undefined && isVaildText(res)) {
return res;
} else {
throw Error(`cannot decrypt name=${name}`);
}
} catch (error) {
throw Error(`cannot decrypt name=${name}`);
}
} else {
throw Error(
`method=${this.method} but the name=${name}, likely mismatch`
);
}
} else if (this.method === "rclone-base64") {
const res = await this.cipherRClone!.decryptNameByCallingWorker(name);
if (res === undefined) {
throw Error(`cannot decrypt name=${name}`);
}
return res;
} else {
throw Error(`not supported decrypt method=${this.method}`);
}
}
_getSizeFromOrigToEnc(x: number) {
if (this.password === "") {
return x;
}
if (this.method === "openssl-base64") {
return openssl.getSizeFromOrigToEnc(x);
} else if (this.method === "rclone-base64") {
return rclone.getSizeFromOrigToEnc(x);
} else {
throw Error(`not supported encrypt method=${this.method}`);
}
}
async getUserDisplayName(): Promise<string> {
return await this.innerFs.getUserDisplayName();
}
async revokeAuth(): Promise<any> {
return await this.innerFs.revokeAuth();
}
}
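
FakeFsEncrypt is a decorator over any other FakeFs. A hypothetical wiring; note that walk() doubles as the builder of cacheMapOrigToEnc, which stat/mkdir/writeFile/readFile/rm all require:

const inner = new FakeFsDropbox(settings.dropbox, vaultName, saveSettings);
const enc = new FakeFsEncrypt(inner, "my-password", "rclone-base64");

const check = await enc.isPasswordOk();
if (!check.ok) {
  throw Error(`password check failed: ${check.reason}`);
}
await enc.walk(); // builds the orig -> encrypted key cache first
const content = await enc.readFile("notes/hello.md"); // decrypted transparently
await enc.closeResources(); // shuts down the rclone worker pool, if any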

View File: src/fsGetter.ts (new file, 45 lines)

@ -0,0 +1,45 @@
import { RemotelySavePluginSettings } from "./baseTypes";
import { FakeFs } from "./fsAll";
import { FakeFsDropbox } from "./fsDropbox";
import { FakeFsOnedrive } from "./fsOnedrive";
import { FakeFsS3 } from "./fsS3";
import { FakeFsWebdav } from "./fsWebdav";
/**
* To avoid circular dependency, we need a new file here.
*/
export function getClient(
settings: RemotelySavePluginSettings,
vaultName: string,
saveUpdatedConfigFunc: () => Promise<any>
): FakeFs {
switch (settings.serviceType) {
case "s3":
return new FakeFsS3(settings.s3);
case "webdav":
return new FakeFsWebdav(
settings.webdav,
vaultName,
saveUpdatedConfigFunc
);
case "dropbox":
return new FakeFsDropbox(
settings.dropbox,
vaultName,
saveUpdatedConfigFunc
);
case "onedrive":
return new FakeFsOnedrive(
settings.onedrive,
vaultName,
saveUpdatedConfigFunc
);
default:
throw Error(`cannot init client for serviceType=${settings.serviceType}`);
}
}
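
A hypothetical call site: the returned object is used purely through the FakeFs interface, so sync code no longer needs to branch on serviceType:

const fs: FakeFs = getClient(
  this.settings,
  this.app.vault.getName(),
  async () => await this.saveSettings() // hypothetical persistence callback
);
const remoteEntities = await fs.walk();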

View File: src/fsLocal.ts (new file, 171 lines)

@ -0,0 +1,171 @@
import { DEFAULT_DEBUG_FOLDER, Entity } from "./baseTypes";
import { FakeFs } from "./fsAll";
import { TFile, TFolder, type Vault } from "obsidian";
import { listFilesInObsFolder } from "./obsFolderLister";
import { Profiler } from "./profiler";
import { getFolderLevels, mkdirpInVault, statFix } from "./misc";
export class FakeFsLocal extends FakeFs {
vault: Vault;
syncConfigDir: boolean;
configDir: string;
pluginID: string;
profiler: Profiler;
deleteToWhere: "obsidian" | "system";
kind: "local";
constructor(
vault: Vault,
syncConfigDir: boolean,
configDir: string,
pluginID: string,
profiler: Profiler,
deleteToWhere: "obsidian" | "system"
) {
super();
this.vault = vault;
this.syncConfigDir = syncConfigDir;
this.configDir = configDir;
this.pluginID = pluginID;
this.profiler = profiler;
this.deleteToWhere = deleteToWhere;
this.kind = "local";
}
async walk(): Promise<Entity[]> {
this.profiler.addIndent();
this.profiler.insert("enter walk for local");
const local: Entity[] = [];
const localTAbstractFiles = this.vault.getAllLoadedFiles();
this.profiler.insert("finish getting walk for local");
for (const entry of localTAbstractFiles) {
let r: Entity | undefined = undefined;
let key = entry.path;
if (entry.path === "/") {
// ignore
continue;
} else if (entry instanceof TFile) {
let mtimeLocal: number | undefined = entry.stat.mtime;
if (mtimeLocal <= 0) {
mtimeLocal = entry.stat.ctime;
}
if (mtimeLocal === 0) {
mtimeLocal = undefined;
}
if (mtimeLocal === undefined) {
throw Error(
`Your file ${key} has a last modified time of 0; don't know how to deal with it`
);
}
r = {
key: entry.path, // local always unencrypted
keyRaw: entry.path,
mtimeCli: mtimeLocal,
mtimeSvr: mtimeLocal,
size: entry.stat.size, // local always unencrypted
sizeRaw: entry.stat.size,
};
} else if (entry instanceof TFolder) {
key = `${entry.path}/`;
r = {
key: key,
keyRaw: key,
size: 0,
sizeRaw: 0,
};
} else {
throw Error(`unexpected ${entry}`);
}
if (r.keyRaw.startsWith(DEFAULT_DEBUG_FOLDER)) {
// skip listing the debug folder,
// which should never be involved in sync
continue;
} else {
local.push(r);
}
}
this.profiler.insert("finish transforming walk for local");
if (this.syncConfigDir) {
this.profiler.insert("into syncConfigDir");
const syncFiles = await listFilesInObsFolder(
this.configDir,
this.vault,
this.pluginID
);
for (const f of syncFiles) {
local.push(f);
}
this.profiler.insert("finish syncConfigDir");
}
this.profiler.insert("finish walk for local");
this.profiler.removeIndent();
return local;
}
async stat(key: string): Promise<Entity> {
const statRes = await statFix(this.vault, key);
if (statRes === undefined || statRes === null) {
throw Error(`${key} does not exist! cannot stat for local`);
}
const isFolder = statRes.type === "folder";
return {
key: isFolder ? `${key}/` : key, // local always unencrypted
keyRaw: isFolder ? `${key}/` : key,
mtimeCli: statRes.mtime,
mtimeSvr: statRes.mtime,
size: statRes.size, // local always unencrypted
sizeRaw: statRes.size,
};
}
async mkdir(key: string, mtime?: number, ctime?: number): Promise<Entity> {
// console.debug(`mkdir: ${key}`);
await mkdirpInVault(key, this.vault);
return await this.stat(key);
}
async writeFile(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity> {
await this.vault.adapter.writeBinary(key, content, {
mtime: mtime,
});
return await this.stat(key);
}
async readFile(key: string): Promise<ArrayBuffer> {
return await this.vault.adapter.readBinary(key);
}
async rm(key: string): Promise<void> {
if (this.deleteToWhere === "obsidian") {
await this.vault.adapter.trashLocal(key);
} else {
// "system"
if (!(await this.vault.adapter.trashSystem(key))) {
await this.vault.adapter.trashLocal(key);
}
}
}
async checkConnect(callbackFunc?: any): Promise<boolean> {
return true;
}
async getUserDisplayName(): Promise<string> {
throw new Error("Method not implemented.");
}
async revokeAuth(): Promise<any> {
throw new Error("Method not implemented.");
}
}
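
For symmetry, a hypothetical construction of the local side; the sync engine can then drive the vault and the remote through the same interface:

const localFs = new FakeFsLocal(
  this.app.vault,
  settings.syncConfigDir, // whether to also walk the config dir
  this.app.vault.configDir,
  this.manifest.id, // pluginID
  profiler, // hypothetical Profiler instance
  "system" // rm() falls back to obsidian trash if system trash fails
);
const localEntities = await localFs.walk();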

View File: src/fsMock.ts (new file, 52 lines)

@ -0,0 +1,52 @@
import { Entity } from "./baseTypes";
import { FakeFs } from "./fsAll";
export class FakeFsMock extends FakeFs {
kind: "mock";
constructor() {
super();
this.kind = "mock";
}
async walk(): Promise<Entity[]> {
throw new Error("Method not implemented.");
}
async stat(key: string): Promise<Entity> {
throw new Error("Method not implemented.");
}
async mkdir(key: string, mtime: number, ctime: number): Promise<Entity> {
throw new Error("Method not implemented.");
}
async writeFile(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity> {
throw new Error("Method not implemented.");
}
async readFile(key: string): Promise<ArrayBuffer> {
throw new Error("Method not implemented.");
}
async rm(key: string): Promise<void> {
throw new Error("Method not implemented.");
}
async checkConnect(callbackFunc?: any): Promise<boolean> {
return true;
}
async getUserDisplayName(): Promise<string> {
throw new Error("Method not implemented.");
}
async revokeAuth(): Promise<any> {
throw new Error("Method not implemented.");
}
}

View File

@ -1,29 +1,23 @@
import { CryptoProvider, PublicClientApplication } from "@azure/msal-node";
import { AuthenticationProvider } from "@microsoft/microsoft-graph-client";
import type {
DriveItem,
FileSystemInfo,
UploadSession,
User,
} from "@microsoft/microsoft-graph-types";
import { CryptoProvider, PublicClientApplication } from "@azure/msal-node";
import { AuthenticationProvider } from "@microsoft/microsoft-graph-client";
import cloneDeep from "lodash/cloneDeep";
import { request, requestUrl, requireApiVersion, Vault } from "obsidian";
import { request, requestUrl } from "obsidian";
import {
VALID_REQURL,
COMMAND_CALLBACK_ONEDRIVE,
DEFAULT_CONTENT_TYPE,
Entity,
OAUTH2_FORCE_EXPIRE_MILLISECONDS,
OnedriveConfig,
Entity,
UploadedType,
VALID_REQURL,
} from "./baseTypes";
import {
bufferToArrayBuffer,
getRandomArrayBuffer,
getRandomIntInclusive,
mkdirpInVault,
} from "./misc";
import { Cipher } from "./encryptUnified";
import { FakeFs } from "./fsAll";
import { bufferToArrayBuffer } from "./misc";
const SCOPES = ["User.Read", "Files.ReadWrite.AppFolder", "offline_access"];
const REDIRECT_URI = `obsidian://${COMMAND_CALLBACK_ONEDRIVE}`;
@ -237,23 +231,6 @@ const getOnedrivePath = (fileOrFolderPath: string, remoteBaseDir: string) => {
return key;
};
const getNormPath = (fileOrFolderPath: string, remoteBaseDir: string) => {
const prefix = `/drive/special/approot:/${remoteBaseDir}`;
if (
!(fileOrFolderPath === prefix || fileOrFolderPath.startsWith(`${prefix}/`))
) {
throw Error(
`"${fileOrFolderPath}" doesn't starts with "${prefix}/" or equals to "${prefix}"`
);
}
if (fileOrFolderPath === prefix) {
return "/";
}
return fileOrFolderPath.slice(`${prefix}/`.length);
};
const constructFromDriveItemToEntityError = (x: DriveItem) => {
return `parentPath="${
x.parentReference?.path ?? "(no parentReference or path)"
@ -361,15 +338,20 @@ const fromDriveItemToEntity = (x: DriveItem, remoteBaseDir: string): Entity => {
const mtimeSvr = Date.parse(x?.fileSystemInfo!.lastModifiedDateTime!);
const mtimeCli = Date.parse(x?.fileSystemInfo!.lastModifiedDateTime!);
return {
key: key,
keyRaw: key,
mtimeSvr: mtimeSvr,
mtimeCli: mtimeCli,
size: isFolder ? 0 : x.size!,
sizeRaw: isFolder ? 0 : x.size!,
// hash: ?? // TODO
etag: x.cTag || "", // do NOT use x.eTag because it changes if meta changes
};
};
////////////////////////////////////////////////////////////////////////////////
// The client.
////////////////////////////////////////////////////////////////////////////////
// to adapt to the required interface
class MyAuthProvider implements AuthenticationProvider {
onedriveConfig: OnedriveConfig;
@ -381,7 +363,8 @@ class MyAuthProvider implements AuthenticationProvider {
this.onedriveConfig = onedriveConfig;
this.saveUpdatedConfigFunc = saveUpdatedConfigFunc;
}
getAccessToken = async () => {
async getAccessToken() {
if (
this.onedriveConfig.accessToken === "" ||
this.onedriveConfig.refreshToken === ""
@ -415,7 +398,7 @@ class MyAuthProvider implements AuthenticationProvider {
console.info("Onedrive accessToken updated");
return this.onedriveConfig.accessToken;
}
};
}
}
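
MyAuthProvider satisfies the AuthenticationProvider interface from @microsoft/microsoft-graph-client, so it could equally be plugged into the SDK's middleware client. A hypothetical sketch; this commit instead calls the REST endpoints directly through _getJson and friends:

import { Client } from "@microsoft/microsoft-graph-client";

// hypothetical alternative to the raw request() helpers below
const graphClient = Client.initWithMiddleware({
  authProvider: new MyAuthProvider(onedriveConfig, saveUpdatedConfigFunc),
});
const me = await graphClient.api("/me").get();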
/**
@ -431,25 +414,31 @@ export const getShrinkedSettings = (onedriveConfig: OnedriveConfig) => {
return config;
};
export class WrappedOnedriveClient {
export class FakeFsOnedrive extends FakeFs {
kind: "onedrive";
onedriveConfig: OnedriveConfig;
remoteBaseDir: string;
vaultFolderExists: boolean;
authGetter: MyAuthProvider;
saveUpdatedConfigFunc: () => Promise<any>;
foldersCreatedBefore: Set<string>;
constructor(
onedriveConfig: OnedriveConfig,
remoteBaseDir: string,
vaultName: string,
saveUpdatedConfigFunc: () => Promise<any>
) {
super();
this.kind = "onedrive";
this.onedriveConfig = onedriveConfig;
this.remoteBaseDir = remoteBaseDir;
this.remoteBaseDir = this.onedriveConfig.remoteBaseDir || vaultName || "";
this.vaultFolderExists = false;
this.saveUpdatedConfigFunc = saveUpdatedConfigFunc;
this.authGetter = new MyAuthProvider(onedriveConfig, saveUpdatedConfigFunc);
this.foldersCreatedBefore = new Set();
}
init = async () => {
async _init() {
// check token
if (
this.onedriveConfig.accessToken === "" ||
@ -463,14 +452,14 @@ export class WrappedOnedriveClient {
if (this.vaultFolderExists) {
// console.info(`already checked, /${this.remoteBaseDir} exist before`)
} else {
const k = await this.getJson("/drive/special/approot/children");
const k = await this._getJson("/drive/special/approot/children");
// console.debug(k);
this.vaultFolderExists =
(k.value as DriveItem[]).filter((x) => x.name === this.remoteBaseDir)
.length > 0;
if (!this.vaultFolderExists) {
console.info(`remote does not have folder /${this.remoteBaseDir}`);
await this.postJson("/drive/special/approot/children", {
await this._postJson("/drive/special/approot/children", {
name: `${this.remoteBaseDir}`,
folder: {},
"@microsoft.graph.conflictBehavior": "replace",
@ -481,9 +470,9 @@ export class WrappedOnedriveClient {
// console.info(`remote folder /${this.remoteBaseDir} exists`);
}
}
};
}
buildUrl = (pathFragOrig: string) => {
_buildUrl(pathFragOrig: string) {
const API_PREFIX = "https://graph.microsoft.com/v1.0";
let theUrl = "";
if (
@ -501,10 +490,10 @@ export class WrappedOnedriveClient {
theUrl = theUrl.replace(/#/g, "%23");
// console.debug(`building url: [${pathFragOrig}] => [${theUrl}]`)
return theUrl;
};
}
getJson = async (pathFragOrig: string) => {
const theUrl = this.buildUrl(pathFragOrig);
async _getJson(pathFragOrig: string) {
const theUrl = this._buildUrl(pathFragOrig);
console.debug(`getJson, theUrl=${theUrl}`);
return JSON.parse(
await request({
@ -517,10 +506,10 @@ export class WrappedOnedriveClient {
},
})
);
};
}
postJson = async (pathFragOrig: string, payload: any) => {
const theUrl = this.buildUrl(pathFragOrig);
async _postJson(pathFragOrig: string, payload: any) {
const theUrl = this._buildUrl(pathFragOrig);
console.debug(`postJson, theUrl=${theUrl}`);
return JSON.parse(
await request({
@ -533,10 +522,10 @@ export class WrappedOnedriveClient {
},
})
);
};
}
patchJson = async (pathFragOrig: string, payload: any) => {
const theUrl = this.buildUrl(pathFragOrig);
async _patchJson(pathFragOrig: string, payload: any) {
const theUrl = this._buildUrl(pathFragOrig);
console.debug(`patchJson, theUrl=${theUrl}`);
return JSON.parse(
await request({
@ -549,10 +538,10 @@ export class WrappedOnedriveClient {
},
})
);
};
}
deleteJson = async (pathFragOrig: string) => {
const theUrl = this.buildUrl(pathFragOrig);
async _deleteJson(pathFragOrig: string) {
const theUrl = this._buildUrl(pathFragOrig);
console.debug(`deleteJson, theUrl=${theUrl}`);
if (VALID_REQURL) {
await requestUrl({
@ -570,10 +559,10 @@ export class WrappedOnedriveClient {
},
});
}
};
}
putArrayBuffer = async (pathFragOrig: string, payload: ArrayBuffer) => {
const theUrl = this.buildUrl(pathFragOrig);
async _putArrayBuffer(pathFragOrig: string, payload: ArrayBuffer) {
const theUrl = this._buildUrl(pathFragOrig);
console.debug(`putArrayBuffer, theUrl=${theUrl}`);
// TODO:
// 20220401: On Android, requestUrl has issue that text becomes base64.
@ -601,7 +590,7 @@ export class WrappedOnedriveClient {
});
return (await res.json()) as DriveItem | UploadSession;
}
};
}
/**
* A specialized function to upload large files by parts
@ -611,14 +600,14 @@ export class WrappedOnedriveClient {
* @param rangeEnd the end, exclusive
* @param size
*/
putUint8ArrayByRange = async (
async _putUint8ArrayByRange(
pathFragOrig: string,
payload: Uint8Array,
rangeStart: number,
rangeEnd: number,
size: number
) => {
const theUrl = this.buildUrl(pathFragOrig);
) {
const theUrl = this._buildUrl(pathFragOrig);
console.debug(
`putUint8ArrayByRange, theUrl=${theUrl}, range=${rangeStart}-${
rangeEnd - 1
@ -654,201 +643,140 @@ export class WrappedOnedriveClient {
});
return (await res.json()) as DriveItem | UploadSession;
}
};
}
export const getOnedriveClient = (
onedriveConfig: OnedriveConfig,
remoteBaseDir: string,
saveUpdatedConfigFunc: () => Promise<any>
) => {
return new WrappedOnedriveClient(
onedriveConfig,
remoteBaseDir,
saveUpdatedConfigFunc
);
};
/**
* Use delta api to list all files and folders
* https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_delta?view=odsp-graph-online
* @param client
*/
export const listAllFromRemote = async (client: WrappedOnedriveClient) => {
await client.init();
const NEXT_LINK_KEY = "@odata.nextLink";
const DELTA_LINK_KEY = "@odata.deltaLink";
let res = await client.getJson(
`/drive/special/approot:/${client.remoteBaseDir}:/delta`
);
let driveItems = res.value as DriveItem[];
// console.debug(driveItems);
while (NEXT_LINK_KEY in res) {
res = await client.getJson(res[NEXT_LINK_KEY]);
driveItems.push(...cloneDeep(res.value as DriveItem[]));
}
// lastly we should have delta link?
if (DELTA_LINK_KEY in res) {
client.onedriveConfig.deltaLink = res[DELTA_LINK_KEY];
await client.saveUpdatedConfigFunc();
/**
* Use delta api to list all files and folders
* https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_delta?view=odsp-graph-online
*/
async walk(): Promise<Entity[]> {
await this._init();
const NEXT_LINK_KEY = "@odata.nextLink";
const DELTA_LINK_KEY = "@odata.deltaLink";
let res = await this._getJson(
`/drive/special/approot:/${this.remoteBaseDir}:/delta`
);
let driveItems = res.value as DriveItem[];
// console.debug(driveItems);
while (NEXT_LINK_KEY in res) {
res = await this._getJson(res[NEXT_LINK_KEY]);
driveItems.push(...cloneDeep(res.value as DriveItem[]));
}
// lastly we should have delta link?
if (DELTA_LINK_KEY in res) {
this.onedriveConfig.deltaLink = res[DELTA_LINK_KEY];
await this.saveUpdatedConfigFunc();
}
// unify everything to Entity
const unifiedContents = driveItems
.map((x) => fromDriveItemToEntity(x, this.remoteBaseDir))
.filter((x) => x.key !== "/");
return unifiedContents;
}
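// A minimal sketch of the delta-pagination pattern walk() uses, assuming
// only a getJson(urlOrPath) helper shaped like _getJson above:
async function collectDelta(
  getJson: (u: string) => Promise<any>,
  firstUrl: string
): Promise<{ items: any[]; deltaLink?: string }> {
  const items: any[] = [];
  let res = await getJson(firstUrl);
  items.push(...(res.value ?? []));
  while ("@odata.nextLink" in res) {
    res = await getJson(res["@odata.nextLink"]);
    items.push(...(res.value ?? []));
  }
  // the last page carries the delta link for the next incremental listing
  return { items, deltaLink: res["@odata.deltaLink"] };
}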
// unify everything to Entity
const unifiedContents = driveItems
.map((x) => fromDriveItemToEntity(x, client.remoteBaseDir))
.filter((x) => x.keyRaw !== "/");
async stat(key: string): Promise<Entity> {
await this._init();
return await this._statFromRoot(getOnedrivePath(key, this.remoteBaseDir));
}
return unifiedContents;
};
async _statFromRoot(key: string): Promise<Entity> {
// console.info(`remotePath=${remotePath}`);
const rsp = await this._getJson(
`${key}?$select=cTag,eTag,fileSystemInfo,folder,file,name,parentReference,size`
);
// console.info(rsp);
const driveItem = rsp as DriveItem;
const res = fromDriveItemToEntity(driveItem, this.remoteBaseDir);
// console.info(res);
return res;
}
export const getRemoteMeta = async (
client: WrappedOnedriveClient,
remotePath: string
) => {
await client.init();
// console.info(`remotePath=${remotePath}`);
const rsp = await client.getJson(
`${remotePath}?$select=cTag,eTag,fileSystemInfo,folder,file,name,parentReference,size`
);
// console.info(rsp);
const driveItem = rsp as DriveItem;
const res = fromDriveItemToEntity(driveItem, client.remoteBaseDir);
// console.info(res);
return res;
};
async mkdir(key: string, mtime?: number, ctime?: number): Promise<Entity> {
if (!key.endsWith("/")) {
throw Error(`you should not call mkdir on ${key}`);
}
await this._init();
const uploadFolder = getOnedrivePath(key, this.remoteBaseDir);
console.debug(`mkdir uploadFolder=${uploadFolder}`);
return await this._mkdirFromRoot(uploadFolder, mtime, ctime);
}
export const uploadToRemote = async (
client: WrappedOnedriveClient,
fileOrFolderPath: string,
vault: Vault | undefined,
isRecursively: boolean,
cipher: Cipher,
remoteEncryptedKey: string = "",
foldersCreatedBefore: Set<string> | undefined = undefined,
uploadRaw: boolean = false,
rawContent: string | ArrayBuffer = ""
): Promise<UploadedType> => {
await client.init();
async _mkdirFromRoot(
key: string,
mtime?: number,
ctime?: number
): Promise<Entity> {
// console.debug(`foldersCreatedBefore=${Array.from(this.foldersCreatedBefore)}`);
if (this.foldersCreatedBefore.has(key)) {
// created, pass
// console.debug(`folder ${key} created.`)
} else {
// https://stackoverflow.com/questions/56479865/creating-nested-folders-in-one-go-onedrive-api
// use PATCH to create folder recursively!!!
let payload: any = {
folder: {},
"@microsoft.graph.conflictBehavior": "replace",
};
const fileSystemInfo: Record<string, string> = {};
if (mtime !== undefined && mtime !== 0) {
const mtimeStr = new Date(mtime).toISOString();
fileSystemInfo["lastModifiedDateTime"] = mtimeStr;
}
if (ctime !== undefined && ctime !== 0) {
const ctimeStr = new Date(ctime).toISOString();
fileSystemInfo["createdDateTime"] = ctimeStr;
}
if (Object.keys(fileSystemInfo).length > 0) {
payload["fileSystemInfo"] = fileSystemInfo;
}
await this._patchJson(key, payload);
}
const res = await this._statFromRoot(key);
return res;
}
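// For illustration, a payload _mkdirFromRoot may send for a nested folder,
// with made-up timestamps; field names follow the Graph driveItem schema:
const examplePayload = {
  folder: {},
  "@microsoft.graph.conflictBehavior": "replace",
  fileSystemInfo: {
    lastModifiedDateTime: new Date(1714176000000).toISOString(),
    createdDateTime: new Date(1714089600000).toISOString(),
  },
};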
let uploadFile = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
if (remoteEncryptedKey === undefined || remoteEncryptedKey === "") {
async writeFile(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity> {
if (key.endsWith("/")) {
throw Error(`you should not call writeFile on ${key}`);
}
await this._init();
const uploadFile = getOnedrivePath(key, this.remoteBaseDir);
console.debug(`uploadFile=${uploadFile}`);
return await this._writeFileFromRoot(
uploadFile,
content,
mtime,
ctime,
key
);
}
async _writeFileFromRoot(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number,
origKey: string
): Promise<Entity> {
if (content.byteLength === 0) {
throw Error(
`uploadToRemote(onedrive) you have password but remoteEncryptedKey is empty!`
`${origKey}: empty files are not allowed in OneDrive; please write something into the file.`
);
}
uploadFile = remoteEncryptedKey;
}
uploadFile = getOnedrivePath(uploadFile, client.remoteBaseDir);
console.debug(`uploadFile=${uploadFile}`);
let mtime = 0;
let ctime = 0;
const s = await vault?.adapter?.stat(fileOrFolderPath);
if (s !== undefined && s !== null) {
mtime = s.mtime;
ctime = s.ctime;
}
const ctimeStr = new Date(ctime).toISOString();
const mtimeStr = new Date(mtime).toISOString();
const isFolder = fileOrFolderPath.endsWith("/");
if (isFolder && isRecursively) {
throw Error("upload function doesn't implement recursive function yet!");
} else if (isFolder && !isRecursively) {
if (uploadRaw) {
throw Error(`you specify uploadRaw, but you also provide a folder key!`);
}
// folder
if (cipher.isPasswordEmpty() || cipher.isFolderAware()) {
// if not encrypted, || encrypted isFolderAware, mkdir a remote folder
if (foldersCreatedBefore?.has(uploadFile)) {
// created, pass
} else {
// https://stackoverflow.com/questions/56479865/creating-nested-folders-in-one-go-onedrive-api
// use PATCH to create folder recursively!!!
let k: any = {
folder: {},
"@microsoft.graph.conflictBehavior": "replace",
};
if (mtime !== 0 && ctime !== 0) {
k = {
folder: {},
"@microsoft.graph.conflictBehavior": "replace",
fileSystemInfo: {
lastModifiedDateTime: mtimeStr,
createdDateTime: ctimeStr,
} as FileSystemInfo,
};
}
await client.patchJson(uploadFile, k);
}
const res = await getRemoteMeta(client, uploadFile);
return {
entity: res,
mtimeCli: mtime,
};
} else {
// if encrypted && !isFolderAware(),
// upload a fake, random-size file
// with the encrypted file name
const byteLengthRandom = getRandomIntInclusive(
1,
65536 /* max allowed */
);
const arrBufRandom = await cipher.encryptContent(
getRandomArrayBuffer(byteLengthRandom)
);
// an encrypted folder is always small, we just use put here
await client.putArrayBuffer(
`${uploadFile}:/content?${new URLSearchParams({
"@microsoft.graph.conflictBehavior": "replace",
})}`,
arrBufRandom
);
if (mtime !== 0 && ctime !== 0) {
await client.patchJson(`${uploadFile}`, {
fileSystemInfo: {
lastModifiedDateTime: mtimeStr,
createdDateTime: ctimeStr,
} as FileSystemInfo,
});
}
// console.info(uploadResult)
const res = await getRemoteMeta(client, uploadFile);
return {
entity: res,
mtimeCli: mtime,
};
}
} else {
// file
// we ignore isRecursively parameter here
let localContent = undefined;
if (uploadRaw) {
if (typeof rawContent === "string") {
localContent = new TextEncoder().encode(rawContent).buffer;
} else {
localContent = rawContent;
}
} else {
if (vault === undefined) {
throw new Error(
`the vault variable is not passed but we want to read ${fileOrFolderPath} for OneDrive`
);
}
localContent = await vault.adapter.readBinary(fileOrFolderPath);
}
let remoteContent = localContent;
if (!cipher.isPasswordEmpty()) {
remoteContent = await cipher.encryptContent(localContent);
}
const ctimeStr = new Date(ctime).toISOString();
const mtimeStr = new Date(mtime).toISOString();
// no need to create parent folders first, cool!
@@ -857,16 +785,16 @@ export const uploadToRemote = async (
const RANGE_SIZE = MIN_UNIT * 20; // about 6.5536 MB
const DIRECT_UPLOAD_MAX_SIZE = 1000 * 1000 * 4; // 4 Megabyte
if (remoteContent.byteLength < DIRECT_UPLOAD_MAX_SIZE) {
if (content.byteLength < DIRECT_UPLOAD_MAX_SIZE) {
// directly using put!
await client.putArrayBuffer(
`${uploadFile}:/content?${new URLSearchParams({
await this._putArrayBuffer(
`${key}:/content?${new URLSearchParams({
"@microsoft.graph.conflictBehavior": "replace",
})}`,
remoteContent
content
);
if (mtime !== 0 && ctime !== 0) {
await client.patchJson(`${uploadFile}`, {
await this._patchJson(key, {
fileSystemInfo: {
lastModifiedDateTime: mtimeStr,
createdDateTime: ctimeStr,
@@ -879,13 +807,13 @@ export const uploadToRemote = async (
// 1. create uploadSession
// uploadFile already starts with /drive/special/approot:/${remoteBaseDir}
let k: any = {
let payload: any = {
item: {
"@microsoft.graph.conflictBehavior": "replace",
},
};
if (mtime !== 0 && ctime !== 0) {
k = {
payload = {
item: {
"@microsoft.graph.conflictBehavior": "replace",
@@ -897,9 +825,9 @@ export const uploadToRemote = async (
},
};
}
const s: UploadSession = await client.postJson(
`${uploadFile}:/createUploadSession`,
k
const s: UploadSession = await this._postJson(
`${key}:/createUploadSession`,
payload
);
const uploadUrl = s.uploadUrl!;
console.debug("uploadSession = ");
@@ -907,12 +835,12 @@ export const uploadToRemote = async (
// 2. upload by ranges
// convert to uint8
const uint8 = new Uint8Array(remoteContent);
const uint8 = new Uint8Array(content);
// upload the ranges one by one
let rangeStart = 0;
while (rangeStart < uint8.byteLength) {
await client.putUint8ArrayByRange(
await this._putUint8ArrayByRange(
uploadUrl,
uint8,
rangeStart,
@@ -923,132 +851,79 @@ export const uploadToRemote = async (
}
}
const res = await getRemoteMeta(client, uploadFile);
return {
entity: res,
mtimeCli: mtime,
};
}
};
const downloadFromRemoteRaw = async (
client: WrappedOnedriveClient,
remotePath: string
): Promise<ArrayBuffer> => {
await client.init();
const rsp = await client.getJson(
`${remotePath}?$select=@microsoft.graph.downloadUrl`
);
const downloadUrl: string = rsp["@microsoft.graph.downloadUrl"];
if (VALID_REQURL) {
const content = (
await requestUrl({
url: downloadUrl,
headers: { "Cache-Control": "no-cache" },
})
).arrayBuffer;
return content;
} else {
const content = await // cannot set no-cache here, will have cors error
(await fetch(downloadUrl)).arrayBuffer();
return content;
}
};
export const downloadFromRemote = async (
client: WrappedOnedriveClient,
fileOrFolderPath: string,
vault: Vault,
mtime: number,
cipher: Cipher,
remoteEncryptedKey: string = "",
skipSaving: boolean = false
) => {
await client.init();
const isFolder = fileOrFolderPath.endsWith("/");
if (!skipSaving) {
await mkdirpInVault(fileOrFolderPath, vault);
const res = await this._statFromRoot(key);
return res;
}
if (isFolder) {
// mkdirp locally is enough
// do nothing here
return new ArrayBuffer(0);
} else {
let downloadFile = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
downloadFile = remoteEncryptedKey;
async readFile(key: string): Promise<ArrayBuffer> {
await this._init();
if (key.endsWith("/")) {
throw new Error(`you should not call readFile on folder ${key}`);
}
downloadFile = getOnedrivePath(downloadFile, client.remoteBaseDir);
const remoteContent = await downloadFromRemoteRaw(client, downloadFile);
let localContent = remoteContent;
if (!cipher.isPasswordEmpty()) {
localContent = await cipher.decryptContent(remoteContent);
const downloadFile = getOnedrivePath(key, this.remoteBaseDir);
return await this._readFileFromRoot(downloadFile);
}
async _readFileFromRoot(key: string): Promise<ArrayBuffer> {
const rsp = await this._getJson(
`${key}?$select=@microsoft.graph.downloadUrl`
);
const downloadUrl: string = rsp["@microsoft.graph.downloadUrl"];
if (VALID_REQURL) {
const content = (
await requestUrl({
url: downloadUrl,
headers: { "Cache-Control": "no-cache" },
})
).arrayBuffer;
return content;
} else {
// cannot set no-cache here, will have cors error
const content = await (await fetch(downloadUrl)).arrayBuffer();
return content;
}
if (!skipSaving) {
await vault.adapter.writeBinary(fileOrFolderPath, localContent, {
mtime: mtime,
});
}
async rm(key: string): Promise<void> {
if (key === "" || key === "/") {
return;
}
return localContent;
}
};
const remoteFileName = getOnedrivePath(key, this.remoteBaseDir);
export const deleteFromRemote = async (
client: WrappedOnedriveClient,
fileOrFolderPath: string,
cipher: Cipher,
remoteEncryptedKey: string = ""
) => {
if (fileOrFolderPath === "/") {
return;
await this._init();
await this._deleteJson(remoteFileName);
}
let remoteFileName = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
remoteFileName = remoteEncryptedKey;
}
remoteFileName = getOnedrivePath(remoteFileName, client.remoteBaseDir);
await client.init();
await client.deleteJson(remoteFileName);
};
export const checkConnectivity = async (
client: WrappedOnedriveClient,
callbackFunc?: any
) => {
try {
const k = await getUserDisplayName(client);
return k !== "<unknown display name>";
} catch (err) {
console.debug(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
async checkConnect(callbackFunc?: any): Promise<boolean> {
try {
const k = await this.getUserDisplayName();
return k !== "<unknown display name>";
} catch (err) {
console.debug(err);
callbackFunc?.(err);
return false;
}
return false;
}
};
export const getUserDisplayName = async (client: WrappedOnedriveClient) => {
await client.init();
const res: User = await client.getJson("/me?$select=displayName");
return res.displayName || "<unknown display name>";
};
async getUserDisplayName() {
await this._init();
const res: User = await this._getJson("/me?$select=displayName");
return res.displayName || "<unknown display name>";
}
/**
*
* https://docs.microsoft.com/en-us/azure/active-directory/develop/v2-protocols-oidc#send-a-sign-out-request
* https://docs.microsoft.com/en-us/graph/api/user-revokesigninsessions
* https://docs.microsoft.com/en-us/graph/api/user-invalidateallrefreshtokens
* @param client
*/
// export const revokeAuth = async (client: WrappedOnedriveClient) => {
// await client.init();
// await client.postJson('/me/revokeSignInSessions', {});
// };
/**
*
* https://docs.microsoft.com/en-us/azure/active-directory/develop/v2-protocols-oidc#send-a-sign-out-request
* https://docs.microsoft.com/en-us/graph/api/user-revokesigninsessions
* https://docs.microsoft.com/en-us/graph/api/user-invalidateallrefreshtokens
*/
async revokeAuth() {
// await this._init();
// await this._postJson("/me/revokeSignInSessions", {});
throw new Error("Method not implemented.");
}
export const getRevokeAddr = async () => {
return "https://account.live.com/consent/Manage";
};
async getRevokeAddr() {
return "https://account.live.com/consent/Manage";
}
}

751
src/fsS3.ts Normal file
View File

@@ -0,0 +1,751 @@
import type { _Object, PutObjectCommandInput } from "@aws-sdk/client-s3";
import {
DeleteObjectCommand,
GetObjectCommand,
HeadObjectCommand,
HeadObjectCommandOutput,
ListObjectsV2Command,
ListObjectsV2CommandInput,
PutObjectCommand,
S3Client,
} from "@aws-sdk/client-s3";
import { Upload } from "@aws-sdk/lib-storage";
import { HttpRequest, HttpResponse } from "@smithy/protocol-http";
import {
FetchHttpHandler,
FetchHttpHandlerOptions,
} from "@smithy/fetch-http-handler";
// @ts-ignore
import { requestTimeout } from "@smithy/fetch-http-handler/dist-es/request-timeout";
import { buildQueryString } from "@smithy/querystring-builder";
import { HttpHandlerOptions } from "@aws-sdk/types";
import { Buffer } from "buffer";
import * as mime from "mime-types";
import { Platform, requestUrl, RequestUrlParam } from "obsidian";
import { Readable } from "stream";
import * as path from "path";
import AggregateError from "aggregate-error";
import { DEFAULT_CONTENT_TYPE, S3Config, VALID_REQURL } from "./baseTypes";
import { bufferToArrayBuffer } from "./misc";
import PQueue from "p-queue";
import { Entity } from "./baseTypes";
import { FakeFs } from "./fsAll";
////////////////////////////////////////////////////////////////////////////////
// special handler using Obsidian requestUrl
////////////////////////////////////////////////////////////////////////////////
/**
* This is close to the original implementation of FetchHttpHandler
* https://github.com/aws/aws-sdk-js-v3/blob/main/packages/fetch-http-handler/src/fetch-http-handler.ts
* that is released under Apache 2 License.
* But this uses Obsidian requestUrl instead.
*/
class ObsHttpHandler extends FetchHttpHandler {
requestTimeoutInMs: number | undefined;
reverseProxyNoSignUrl: string | undefined;
constructor(
options?: FetchHttpHandlerOptions,
reverseProxyNoSignUrl?: string
) {
super(options);
this.requestTimeoutInMs =
options === undefined ? undefined : options.requestTimeout;
this.reverseProxyNoSignUrl = reverseProxyNoSignUrl;
}
async handle(
request: HttpRequest,
{ abortSignal }: HttpHandlerOptions = {}
): Promise<{ response: HttpResponse }> {
if (abortSignal?.aborted) {
const abortError = new Error("Request aborted");
abortError.name = "AbortError";
return Promise.reject(abortError);
}
let path = request.path;
if (request.query) {
const queryString = buildQueryString(request.query);
if (queryString) {
path += `?${queryString}`;
}
}
const { port, method } = request;
let url = `${request.protocol}//${request.hostname}${
port ? `:${port}` : ""
}${path}`;
if (
this.reverseProxyNoSignUrl !== undefined &&
this.reverseProxyNoSignUrl !== ""
) {
const urlObj = new URL(url);
urlObj.host = this.reverseProxyNoSignUrl;
url = urlObj.href;
}
const body =
method === "GET" || method === "HEAD" ? undefined : request.body;
const transformedHeaders: Record<string, string> = {};
for (const key of Object.keys(request.headers)) {
const keyLower = key.toLowerCase();
if (keyLower === "host" || keyLower === "content-length") {
continue;
}
transformedHeaders[keyLower] = request.headers[key];
}
let contentType: string | undefined = undefined;
if (transformedHeaders["content-type"] !== undefined) {
contentType = transformedHeaders["content-type"];
}
let transformedBody: any = body;
if (ArrayBuffer.isView(body)) {
transformedBody = bufferToArrayBuffer(body);
}
const param: RequestUrlParam = {
body: transformedBody,
headers: transformedHeaders,
method: method,
url: url,
contentType: contentType,
};
const raceOfPromises = [
requestUrl(param).then((rsp) => {
const headers = rsp.headers;
const headersLower: Record<string, string> = {};
for (const key of Object.keys(headers)) {
headersLower[key.toLowerCase()] = headers[key];
}
const stream = new ReadableStream<Uint8Array>({
start(controller) {
controller.enqueue(new Uint8Array(rsp.arrayBuffer));
controller.close();
},
});
return {
response: new HttpResponse({
headers: headersLower,
statusCode: rsp.status,
body: stream,
}),
};
}),
requestTimeout(this.requestTimeoutInMs),
];
if (abortSignal) {
raceOfPromises.push(
new Promise<never>((resolve, reject) => {
abortSignal.onabort = () => {
const abortError = new Error("Request aborted");
abortError.name = "AbortError";
reject(abortError);
};
})
);
}
return Promise.race(raceOfPromises);
}
}
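// The race inside handle() in miniature: whichever settles first wins,
// be it the real response, the timeout rejection, or the abort rejection.
// A self-contained sketch of the same pattern (names are illustrative):
function rejectAfter(ms: number): Promise<never> {
  return new Promise((_, reject) =>
    setTimeout(() => reject(new Error("Request timed out")), ms)
  );
}
async function withTimeout<T>(p: Promise<T>, ms: number): Promise<T> {
  return Promise.race([p, rejectAfter(ms)]);
}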
////////////////////////////////////////////////////////////////////////////////
// other stuffs
////////////////////////////////////////////////////////////////////////////////
export const simpleTransRemotePrefix = (x: string) => {
if (x === undefined) {
return "";
}
let y = path.posix.normalize(x.trim());
if (y === undefined || y === "" || y === "/" || y === ".") {
return "";
}
if (y.startsWith("/")) {
y = y.slice(1);
}
if (!y.endsWith("/")) {
y = `${y}/`;
}
return y;
};
export const DEFAULT_S3_CONFIG: S3Config = {
s3Endpoint: "",
s3Region: "",
s3AccessKeyID: "",
s3SecretAccessKey: "",
s3BucketName: "",
bypassCorsLocally: true,
partsConcurrency: 20,
forcePathStyle: false,
remotePrefix: "",
useAccurateMTime: false, // it causes money, disable by default
reverseProxyNoSignUrl: "",
};
/**
* The Body of the response of AWS GetObject has mixed types,
* and we want to get an ArrayBuffer here.
* See https://github.com/aws/aws-sdk-js-v3/issues/1877
* @param b The Body of GetObject
* @returns Promise<ArrayBuffer>
*/
const getObjectBodyToArrayBuffer = async (
b: Readable | ReadableStream | Blob | undefined
) => {
if (b === undefined) {
throw Error(`ObjectBody is undefined and we don't know how to deal with it`);
}
if (b instanceof Readable) {
return (await new Promise((resolve, reject) => {
const chunks: Uint8Array[] = [];
b.on("data", (chunk) => chunks.push(chunk));
b.on("error", reject);
b.on("end", () => resolve(bufferToArrayBuffer(Buffer.concat(chunks))));
})) as ArrayBuffer;
} else if (b instanceof ReadableStream) {
return await new Response(b, {}).arrayBuffer();
} else if (b instanceof Blob) {
return await b.arrayBuffer();
} else {
throw TypeError(`The type of ${b} is not one of the supported types`);
}
};
const getS3Client = (s3Config: S3Config) => {
let endpoint = s3Config.s3Endpoint;
if (!(endpoint.startsWith("http://") || endpoint.startsWith("https://"))) {
endpoint = `https://${endpoint}`;
}
let s3Client: S3Client;
if (VALID_REQURL && s3Config.bypassCorsLocally) {
s3Client = new S3Client({
region: s3Config.s3Region,
endpoint: endpoint,
forcePathStyle: s3Config.forcePathStyle,
credentials: {
accessKeyId: s3Config.s3AccessKeyID,
secretAccessKey: s3Config.s3SecretAccessKey,
},
requestHandler: new ObsHttpHandler(
undefined,
s3Config.reverseProxyNoSignUrl
),
});
} else {
s3Client = new S3Client({
region: s3Config.s3Region,
endpoint: endpoint,
forcePathStyle: s3Config.forcePathStyle,
credentials: {
accessKeyId: s3Config.s3AccessKeyID,
secretAccessKey: s3Config.s3SecretAccessKey,
},
});
}
s3Client.middlewareStack.add(
(next, context) => (args) => {
(args.request as any).headers["cache-control"] = "no-cache";
return next(args);
},
{
step: "build",
}
);
return s3Client;
};
const getLocalNoPrefixPath = (
fileOrFolderPathWithRemotePrefix: string,
remotePrefix: string
) => {
if (
!(
fileOrFolderPathWithRemotePrefix === `${remotePrefix}` ||
fileOrFolderPathWithRemotePrefix.startsWith(`${remotePrefix}`)
)
) {
throw Error(
`"${fileOrFolderPathWithRemotePrefix}" doesn't starts with "${remotePrefix}"`
);
}
return fileOrFolderPathWithRemotePrefix.slice(`${remotePrefix}`.length);
};
const getRemoteWithPrefixPath = (
fileOrFolderPath: string,
remotePrefix: string
) => {
if (remotePrefix === undefined || remotePrefix === "") {
return fileOrFolderPath;
}
let key = fileOrFolderPath;
if (fileOrFolderPath === "/" || fileOrFolderPath === "") {
// special
key = remotePrefix;
}
if (!fileOrFolderPath.startsWith("/")) {
key = `${remotePrefix}${fileOrFolderPath}`;
}
return key;
};
const fromS3ObjectToEntity = (
x: _Object,
remotePrefix: string,
mtimeRecords: Record<string, number>,
ctimeRecords: Record<string, number>
) => {
// console.debug(`fromS3ObjectToEntity: ${x.Key!}, ${JSON.stringify(x,null,2)}`);
// S3 officially only supports seconds precision!!!!!
const mtimeSvr = Math.floor(x.LastModified!.valueOf() / 1000.0) * 1000;
let mtimeCli = mtimeSvr;
if (x.Key! in mtimeRecords) {
const m2 = mtimeRecords[x.Key!];
if (m2 !== 0) {
// to be compatible with RClone, we read and store the time in seconds in the new version!
if (m2 >= 1000000000000) {
// it's in milliseconds, uploaded by old versions of the code
mtimeCli = m2;
} else {
// it's in seconds, uploaded by new versions of the plugin since March 24, 2024
mtimeCli = m2 * 1000;
}
}
}
const key = getLocalNoPrefixPath(x.Key!, remotePrefix); // we remove prefix here
const r: Entity = {
key: key, // from S3's perspective, the keyRaw is the key; we will change it in decryption
keyRaw: key,
mtimeSvr: mtimeSvr,
mtimeCli: mtimeCli,
sizeRaw: x.Size!,
size: x.Size!, // from S3's perspective, the sizeRaw is the size; we will change it in decryption
etag: x.ETag,
synthesizedFolder: false,
};
return r;
};
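// A compact sketch of the seconds-vs-milliseconds heuristic above:
// non-zero values >= 1e12 are taken as epoch milliseconds (old uploads),
// smaller ones as epoch seconds (rclone-compatible uploads).
function normalizeMTime(m: number): number | undefined {
  if (m === 0) return undefined;
  return m >= 1000000000000 ? m : m * 1000;
}
// normalizeMTime(1711238400) === 1711238400000 (seconds in)
// normalizeMTime(1711238400000) === 1711238400000 (milliseconds in, unchanged)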
const fromS3HeadObjectToEntity = (
fileOrFolderPathWithRemotePrefix: string,
x: HeadObjectCommandOutput,
remotePrefix: string
) => {
// console.debug(`fromS3HeadObjectToEntity: ${fileOrFolderPathWithRemotePrefix}: ${JSON.stringify(x,null,2)}`);
// S3 officially only supports seconds precision!!!!!
const mtimeSvr = Math.floor(x.LastModified!.valueOf() / 1000.0) * 1000;
let mtimeCli = mtimeSvr;
if (x.Metadata !== undefined) {
const m2 = Math.floor(
parseFloat(x.Metadata.mtime || x.Metadata.MTime || "0")
);
if (m2 !== 0) {
// to be compatible with RClone, we read and store the time in seconds in the new version!
if (m2 >= 1000000000000) {
// it's in milliseconds, uploaded by old versions of the code
mtimeCli = m2;
} else {
// it's in seconds, uploaded by new versions of the plugin since March 24, 2024
mtimeCli = m2 * 1000;
}
}
}
// console.debug(
// `fromS3HeadObjectToEntity, fileOrFolderPathWithRemotePrefix=${fileOrFolderPathWithRemotePrefix}, remotePrefix=${remotePrefix}, x=${JSON.stringify(
// x
// )} `
// );
const key = getLocalNoPrefixPath(
fileOrFolderPathWithRemotePrefix,
remotePrefix
);
// console.debug(`fromS3HeadObjectToEntity, key=${key} after removing prefix`);
return {
key: key,
keyRaw: key,
mtimeSvr: mtimeSvr,
mtimeCli: mtimeCli,
sizeRaw: x.ContentLength,
size: x.ContentLength,
etag: x.ETag,
synthesizedFolder: false,
} as Entity;
};
export class FakeFsS3 extends FakeFs {
s3Config: S3Config;
s3Client: S3Client;
kind: "s3";
constructor(s3Config: S3Config) {
super();
this.s3Config = s3Config;
this.s3Client = getS3Client(s3Config);
this.kind = "s3";
}
async walk(): Promise<Entity[]> {
const res = (await this._walkFromRoot(this.s3Config.remotePrefix)).filter(
(x) => x.key !== "" && x.key !== "/"
);
return res;
}
/**
* the input key contains basedir (prefix),
* but the result doesn't contain it.
*/
async _walkFromRoot(prefixOfRawKeys: string | undefined) {
const confCmd = {
Bucket: this.s3Config.s3BucketName,
} as ListObjectsV2CommandInput;
if (prefixOfRawKeys !== undefined && prefixOfRawKeys !== "") {
confCmd.Prefix = prefixOfRawKeys;
}
const contents = [] as _Object[];
const mtimeRecords: Record<string, number> = {};
const ctimeRecords: Record<string, number> = {};
const queueHead = new PQueue({
concurrency: this.s3Config.partsConcurrency,
autoStart: true,
});
queueHead.on("error", (error) => {
queueHead.pause();
queueHead.clear();
throw error;
});
let isTruncated = true;
do {
const rsp = await this.s3Client.send(new ListObjectsV2Command(confCmd));
if (rsp.$metadata.httpStatusCode !== 200) {
throw Error("some thing bad while listing remote!");
}
if (rsp.Contents === undefined) {
break;
}
contents.push(...rsp.Contents);
if (this.s3Config.useAccurateMTime) {
// HEAD requests for every object; expensive, but needed for accurate mtime
for (const content of rsp.Contents) {
queueHead.add(async () => {
const rspHead = await this.s3Client.send(
new HeadObjectCommand({
Bucket: this.s3Config.s3BucketName,
Key: content.Key,
})
);
if (rspHead.$metadata.httpStatusCode !== 200) {
throw Error("some thing bad while heading single object!");
}
if (rspHead.Metadata === undefined) {
// pass
} else {
mtimeRecords[content.Key!] = Math.floor(
parseFloat(
rspHead.Metadata.mtime || rspHead.Metadata.MTime || "0"
)
);
ctimeRecords[content.Key!] = Math.floor(
parseFloat(
rspHead.Metadata.ctime || rspHead.Metadata.CTime || "0"
)
);
}
});
}
}
isTruncated = rsp.IsTruncated ?? false;
confCmd.ContinuationToken = rsp.NextContinuationToken;
if (
isTruncated &&
(confCmd.ContinuationToken === undefined ||
confCmd.ContinuationToken === "")
) {
throw Error("isTruncated is true but no continuationToken provided");
}
} while (isTruncated);
// wait for any head requests
await queueHead.onIdle();
// assemble the result:
// in the end, we need to transform the response list
// back into the local Entity list (with the prefix removed)
return contents.map((x) =>
fromS3ObjectToEntity(
x,
this.s3Config.remotePrefix ?? "",
mtimeRecords,
ctimeRecords
)
);
}
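// The bounded-concurrency pattern above, reduced to its core; PQueue is
// already imported at the top of this file, and only the documented
// add()/onIdle() API is assumed:
async function headAll(
  keys: string[],
  headOne: (k: string) => Promise<void>,
  concurrency = 20
) {
  const q = new PQueue({ concurrency });
  for (const k of keys) {
    q.add(() => headOne(k));
  }
  await q.onIdle(); // resolves once every queued HEAD request settled
}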
async stat(key: string): Promise<Entity> {
let keyFullPath = key;
keyFullPath = getRemoteWithPrefixPath(
keyFullPath,
this.s3Config.remotePrefix ?? ""
);
return await this._statFromRoot(keyFullPath);
}
/**
* the input key contains basedir (prefix),
* but the result doesn't contain it.
*/
async _statFromRoot(key: string): Promise<Entity> {
if (
this.s3Config.remotePrefix !== undefined &&
this.s3Config.remotePrefix !== "" &&
!key.startsWith(this.s3Config.remotePrefix)
) {
throw Error(`_statFromRoot should only accept prefix-ed path`);
}
const res = await this.s3Client.send(
new HeadObjectCommand({
Bucket: this.s3Config.s3BucketName,
Key: key,
})
);
return fromS3HeadObjectToEntity(key, res, this.s3Config.remotePrefix ?? "");
}
async mkdir(key: string, mtime?: number, ctime?: number): Promise<Entity> {
if (!key.endsWith("/")) {
throw new Error(`You should not call mkdir on ${key}!`);
}
const uploadFile = getRemoteWithPrefixPath(
key,
this.s3Config.remotePrefix ?? ""
);
return await this._mkdirFromRoot(uploadFile, mtime, ctime);
}
async _mkdirFromRoot(key: string, mtime?: number, ctime?: number) {
if (
this.s3Config.remotePrefix !== undefined &&
this.s3Config.remotePrefix !== "" &&
!key.startsWith(this.s3Config.remotePrefix)
) {
throw Error(`_mkdirFromRoot should only accept prefix-ed path`);
}
const contentType = DEFAULT_CONTENT_TYPE;
const p: PutObjectCommandInput = {
Bucket: this.s3Config.s3BucketName,
Key: key,
Body: "",
ContentType: contentType,
ContentLength: 0, // we need to set this explicitly to avoid a warning
};
const metadata: Record<string, string> = {};
if (mtime !== undefined && mtime !== 0) {
metadata["MTime"] = `${mtime / 1000.0}`;
}
if (ctime !== undefined && ctime !== 0) {
metadata["CTime"] = `${ctime / 1000.0}`;
}
if (Object.keys(metadata).length > 0) {
p["Metadata"] = metadata;
}
await this.s3Client.send(new PutObjectCommand(p));
return await this._statFromRoot(key);
}
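// The S3 "folder" convention used above, spelled out with made-up values:
// S3 has no real directories, so a zero-byte object whose key ends with
// "/" stands in for one, with optional rclone-style times in seconds.
const exampleMarker: PutObjectCommandInput = {
  Bucket: "my-bucket", // hypothetical
  Key: "notes/attachments/", // trailing slash marks the "folder"
  Body: "",
  ContentType: DEFAULT_CONTENT_TYPE,
  ContentLength: 0,
  Metadata: { MTime: `${Date.now() / 1000.0}` },
};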
async writeFile(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity> {
const uploadFile = getRemoteWithPrefixPath(
key,
this.s3Config.remotePrefix ?? ""
);
const res = await this._writeFileFromRoot(
uploadFile,
content,
mtime,
ctime
);
return res;
}
/**
* the input key contains basedir (prefix),
* but the result doesn't contain it.
*/
async _writeFileFromRoot(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity> {
if (
this.s3Config.remotePrefix !== undefined &&
this.s3Config.remotePrefix !== "" &&
!key.startsWith(this.s3Config.remotePrefix)
) {
throw Error(`_writeFileFromRoot should only accept prefix-ed path`);
}
const bytesIn5MB = 5242880;
const body = new Uint8Array(content);
let contentType = DEFAULT_CONTENT_TYPE;
contentType =
mime.contentType(mime.lookup(key) || DEFAULT_CONTENT_TYPE) ||
DEFAULT_CONTENT_TYPE;
const upload = new Upload({
client: this.s3Client,
queueSize: this.s3Config.partsConcurrency, // concurrency
partSize: bytesIn5MB, // minimal 5MB by default
leavePartsOnError: false,
params: {
Bucket: this.s3Config.s3BucketName,
Key: key,
Body: body,
ContentType: contentType,
Metadata: {
MTime: `${mtime / 1000.0}`,
CTime: `${ctime / 1000.0}`,
},
},
});
upload.on("httpUploadProgress", (progress) => {
// console.info(progress);
});
await upload.done();
return await this._statFromRoot(key);
}
async readFile(key: string): Promise<ArrayBuffer> {
if (key.endsWith("/")) {
throw new Error(`you should not call readFile on folder ${key}`);
}
const downloadFile = getRemoteWithPrefixPath(
key,
this.s3Config.remotePrefix ?? ""
);
return await this._readFileFromRoot(downloadFile);
}
async _readFileFromRoot(key: string): Promise<ArrayBuffer> {
if (
this.s3Config.remotePrefix !== undefined &&
this.s3Config.remotePrefix !== "" &&
!key.startsWith(this.s3Config.remotePrefix)
) {
throw Error(`_readFileFromRoot should only accept prefix-ed path`);
}
const data = await this.s3Client.send(
new GetObjectCommand({
Bucket: this.s3Config.s3BucketName,
Key: key,
})
);
const bodyContents = await getObjectBodyToArrayBuffer(data.Body);
return bodyContents;
}
async rm(key: string): Promise<void> {
if (key === "/") {
return;
}
const remoteFileName = getRemoteWithPrefixPath(
key,
this.s3Config.remotePrefix ?? ""
);
await this.s3Client.send(
new DeleteObjectCommand({
Bucket: this.s3Config.s3BucketName,
Key: remoteFileName,
})
);
// TODO: do we need to delete folder recursively?
// maybe we should not
// because the outer sync algorithm should do that
// (await this._walkFromRoot(remoteFileName)).map(...)
}
async checkConnect(callbackFunc?: any): Promise<boolean> {
try {
// TODO: no universal way now, just check this in connectivity
if (Platform.isIosApp && this.s3Config.s3Endpoint.startsWith("http://")) {
throw Error(
`Your s3 endpoint could only be https, not http, because of the iOS restriction.`
);
}
// const results = await this.s3Client.send(
// new HeadBucketCommand({ Bucket: this.s3Config.s3BucketName })
// );
// very simplified version of listing objects
const confCmd = {
Bucket: this.s3Config.s3BucketName,
} as ListObjectsV2CommandInput;
const results = await this.s3Client.send(
new ListObjectsV2Command(confCmd)
);
if (
results === undefined ||
results.$metadata === undefined ||
results.$metadata.httpStatusCode === undefined
) {
const err = "results or $metadata or httStatusCode is undefined";
console.debug(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
}
return false;
}
return results.$metadata.httpStatusCode === 200;
} catch (err: any) {
console.debug(err);
if (callbackFunc !== undefined) {
if (this.s3Config.s3Endpoint.contains(this.s3Config.s3BucketName)) {
const err2 = new AggregateError([
err,
new Error(
"Maybe you've included the bucket name inside the endpoint setting. Please remove the bucket name and try again."
),
]);
callbackFunc(err2);
} else {
callbackFunc(err);
}
}
return false;
}
}
async getUserDisplayName(): Promise<string> {
throw new Error("Method not implemented.");
}
async revokeAuth() {
throw new Error("Method not implemented.");
}
}

494
src/fsWebdav.ts Normal file
View File

@@ -0,0 +1,494 @@
import { getReasonPhrase } from "http-status-codes/build/cjs/utils-functions";
import { Buffer } from "buffer";
import cloneDeep from "lodash/cloneDeep";
import { Queue } from "@fyears/tsqueue";
import chunk from "lodash/chunk";
import flatten from "lodash/flatten";
import { Platform, requestUrl } from "obsidian";
import { FakeFs } from "./fsAll";
import { bufferToArrayBuffer } from "./misc";
import { Entity, VALID_REQURL, WebdavConfig } from "./baseTypes";
import type {
FileStat,
WebDAVClient,
RequestOptionsWithState,
// Response,
// ResponseDataDetailed,
} from "webdav";
/**
* https://stackoverflow.com/questions/32850898/how-to-check-if-a-string-has-any-non-iso-8859-1-characters-with-javascript
* @param str
* @returns true if all chars are ISO-8859-1
*/
function onlyAscii(str: string) {
return !/[^\u0000-\u00ff]/g.test(str);
}
/**
* https://stackoverflow.com/questions/12539574/
* @param obj
* @returns
*/
function objKeyToLower(obj: Record<string, string>) {
return Object.fromEntries(
Object.entries(obj).map(([k, v]) => [k.toLowerCase(), v])
);
}
// @ts-ignore
import { getPatcher } from "webdav/dist/web/index.js";
if (VALID_REQURL) {
getPatcher().patch(
"request",
async (options: RequestOptionsWithState): Promise<Response> => {
const transformedHeaders = objKeyToLower({ ...options.headers });
delete transformedHeaders["host"];
delete transformedHeaders["content-length"];
const reqContentType =
transformedHeaders["accept"] ?? transformedHeaders["content-type"];
const redactedHeaders = { ...transformedHeaders };
if (redactedHeaders.hasOwnProperty("authorization")) {
  redactedHeaders["authorization"] = "<redacted>";
}
console.debug(`before request:`);
console.debug(`url: ${options.url}`);
console.debug(`method: ${options.method}`);
console.debug(`headers: ${JSON.stringify(redactedHeaders, null, 2)}`);
console.debug(`reqContentType: ${reqContentType}`);
let r = await requestUrl({
url: options.url,
method: options.method,
body: options.data as string | ArrayBuffer,
headers: transformedHeaders,
contentType: reqContentType,
throw: false,
});
if (
r.status === 401 &&
Platform.isIosApp &&
!options.url.endsWith("/") &&
!options.url.endsWith(".md") &&
options.method.toUpperCase() === "PROPFIND"
) {
// don't ask me why,
// some webdav servers have some mysterious behaviours,
// if a folder is requested without a trailing slash, the servers return 401 instead of 404
// here is a dirty hack that works
console.debug(`so we have 401, try appending request url with slash`);
r = await requestUrl({
url: `${options.url}/`,
method: options.method,
body: options.data as string | ArrayBuffer,
headers: transformedHeaders,
contentType: reqContentType,
throw: false,
});
}
console.debug(`after request:`);
const rspHeaders = objKeyToLower({ ...r.headers });
console.debug(`rspHeaders: ${JSON.stringify(rspHeaders, null, 2)}`);
for (let key in rspHeaders) {
if (rspHeaders.hasOwnProperty(key)) {
// avoid the error:
// Failed to read the 'headers' property from 'ResponseInit': String contains non ISO-8859-1 code point.
// const possibleNonAscii = [
// "Content-Disposition",
// "X-Accel-Redirect",
// "X-Outfilename",
// "X-Sendfile"
// ];
// for (const p of possibleNonAscii) {
// if (key === p || key === p.toLowerCase()) {
// rspHeaders[key] = encodeURIComponent(rspHeaders[key]);
// }
// }
if (!onlyAscii(rspHeaders[key])) {
console.debug(`rspHeaders[key] needs encode: ${key}`);
rspHeaders[key] = encodeURIComponent(rspHeaders[key]);
}
}
}
let r2: Response | undefined = undefined;
const statusText = getReasonPhrase(r.status);
console.debug(`statusText: ${statusText}`);
if ([101, 103, 204, 205, 304].includes(r.status)) {
// A null body status is a status that is 101, 103, 204, 205, or 304.
// https://fetch.spec.whatwg.org/#statuses
// fix this: Failed to construct 'Response': Response with null body status cannot have body
r2 = new Response(null, {
status: r.status,
statusText: statusText,
headers: rspHeaders,
});
} else {
r2 = new Response(r.arrayBuffer, {
status: r.status,
statusText: statusText,
headers: rspHeaders,
});
}
return r2;
}
);
}
// @ts-ignore
import { AuthType, BufferLike, createClient } from "webdav/dist/web/index.js";
export const DEFAULT_WEBDAV_CONFIG = {
address: "",
username: "",
password: "",
authType: "basic",
manualRecursive: true,
depth: "manual_1",
remoteBaseDir: "",
} as WebdavConfig;
const getWebdavPath = (fileOrFolderPath: string, remoteBaseDir: string) => {
let key = fileOrFolderPath;
if (fileOrFolderPath === "/" || fileOrFolderPath === "") {
// special
key = `/${remoteBaseDir}/`;
} else if (fileOrFolderPath.startsWith("/")) {
console.warn(
`why does the path ${fileOrFolderPath} start with '/'? we just go on anyway.`
);
key = `/${remoteBaseDir}${fileOrFolderPath}`;
} else {
key = `/${remoteBaseDir}/${fileOrFolderPath}`;
}
return key;
};
const getNormPath = (fileOrFolderPath: string, remoteBaseDir: string) => {
if (
!(
fileOrFolderPath === `/${remoteBaseDir}` ||
fileOrFolderPath.startsWith(`/${remoteBaseDir}/`)
)
) {
throw Error(
`"${fileOrFolderPath}" doesn't starts with "/${remoteBaseDir}/"`
);
}
return fileOrFolderPath.slice(`/${remoteBaseDir}/`.length);
};
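// Illustrative mappings for the two helpers above (remoteBaseDir = "vault"):
// getWebdavPath("", "vault") => "/vault/"
// getWebdavPath("notes/a.md", "vault") => "/vault/notes/a.md"
// getNormPath("/vault/notes/a.md", "vault") => "notes/a.md"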
const fromWebdavItemToEntity = (x: FileStat, remoteBaseDir: string): Entity => {
let key = getNormPath(x.filename, remoteBaseDir);
if (x.type === "directory" && !key.endsWith("/")) {
key = `${key}/`;
}
const mtimeSvr = Date.parse(x.lastmod).valueOf();
return {
key: key,
keyRaw: key,
mtimeSvr: mtimeSvr,
mtimeCli: mtimeSvr, // TODO: no universal way to set mtime in webdav
size: x.size,
sizeRaw: x.size,
};
};
export class FakeFsWebdav extends FakeFs {
kind: "webdav";
webdavConfig: WebdavConfig;
remoteBaseDir: string;
client!: WebDAVClient;
vaultFolderExists: boolean;
saveUpdatedConfigFunc: () => Promise<any>;
constructor(
webdavConfig: WebdavConfig,
vaultName: string,
saveUpdatedConfigFunc: () => Promise<any>
) {
super();
this.kind = "webdav";
this.webdavConfig = cloneDeep(webdavConfig);
this.webdavConfig.address = encodeURI(this.webdavConfig.address);
this.remoteBaseDir = this.webdavConfig.remoteBaseDir || vaultName || "";
this.vaultFolderExists = false;
this.saveUpdatedConfigFunc = saveUpdatedConfigFunc;
}
async _init() {
// init client if not inited
if (this.client !== undefined) {
return;
}
if (Platform.isIosApp && !this.webdavConfig.address.startsWith("https")) {
throw Error(
`Your webdav address could only be https, not http, because of the iOS restriction.`
);
}
const headers = {
"Cache-Control": "no-cache",
};
if (
this.webdavConfig.username !== "" &&
this.webdavConfig.password !== ""
) {
this.client = createClient(this.webdavConfig.address, {
username: this.webdavConfig.username,
password: this.webdavConfig.password,
headers: headers,
authType:
this.webdavConfig.authType === "digest"
? AuthType.Digest
: AuthType.Password,
});
} else {
console.info("no password");
this.client = createClient(this.webdavConfig.address, {
headers: headers,
});
}
// check vault folder
if (this.vaultFolderExists) {
// pass
} else {
const res = await this.client.exists(`/${this.remoteBaseDir}/`);
if (res) {
// console.info("remote vault folder exits!");
this.vaultFolderExists = true;
} else {
console.info("remote vault folder not exists, creating");
await this.client.createDirectory(`/${this.remoteBaseDir}/`);
console.info("remote vault folder created!");
this.vaultFolderExists = true;
}
}
// adjust depth parameter
if (
this.webdavConfig.depth === "auto" ||
this.webdavConfig.depth === "auto_1" ||
this.webdavConfig.depth === "auto_infinity" ||
this.webdavConfig.depth === "auto_unknown"
) {
this.webdavConfig.depth = "manual_1";
this.webdavConfig.manualRecursive = true;
if (this.saveUpdatedConfigFunc !== undefined) {
await this.saveUpdatedConfigFunc();
console.info(
`webdav depth="auto_???" is changed to ${this.webdavConfig.depth}`
);
}
}
}
async walk(): Promise<Entity[]> {
await this._init();
let contents = [] as FileStat[];
if (
this.webdavConfig.depth === "auto" ||
this.webdavConfig.depth === "auto_unknown" ||
this.webdavConfig.depth === "auto_1" ||
this.webdavConfig.depth === "auto_infinity" /* don't trust auto now */ ||
this.webdavConfig.depth === "manual_1"
) {
// the remote doesn't support infinite-depth propfind,
// so we need to do a BFS here
const q = new Queue([`/${this.remoteBaseDir}`]);
const CHUNK_SIZE = 10;
while (q.length > 0) {
const itemsToFetch: string[] = [];
while (q.length > 0) {
itemsToFetch.push(q.pop()!);
}
const itemsToFetchChunks = chunk(itemsToFetch, CHUNK_SIZE);
// console.debug(itemsToFetchChunks);
const subContents = [] as FileStat[];
for (const singleChunk of itemsToFetchChunks) {
const r = singleChunk.map((x) => {
return this.client.getDirectoryContents(x, {
deep: false,
details: false /* no need for verbose details here */,
// TODO: to support .obsidian,
// we need to load all files including dot,
// anyway to reduce the resources?
// glob: "/**" /* avoid dot files by using glob */,
}) as Promise<FileStat[]>;
});
const r2 = flatten(await Promise.all(r));
subContents.push(...r2);
}
for (let i = 0; i < subContents.length; ++i) {
const f = subContents[i];
contents.push(f);
if (f.type === "directory") {
q.push(f.filename);
}
}
}
} else {
// the remote supports infinite-depth propfind
contents = (await this.client.getDirectoryContents(
`/${this.remoteBaseDir}`,
{
deep: true,
details: false /* no need for verbose details here */,
// TODO: to support .obsidian,
// we need to load all files including dot,
// anyway to reduce the resources?
// glob: "/**" /* avoid dot files by using glob */,
}
)) as FileStat[];
}
return contents.map((x) => fromWebdavItemToEntity(x, this.remoteBaseDir));
}
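// The BFS above in miniature, with the chunking and Entity mapping
// stripped away; list() is a stand-in for getDirectoryContents:
async function bfsList(
  list: (dir: string) => Promise<FileStat[]>,
  root: string
): Promise<FileStat[]> {
  const out: FileStat[] = [];
  let frontier = [root];
  while (frontier.length > 0) {
    const levels = await Promise.all(frontier.map((d) => list(d)));
    frontier = [];
    for (const items of levels) {
      for (const it of items) {
        out.push(it);
        if (it.type === "directory") frontier.push(it.filename);
      }
    }
  }
  return out;
}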
async stat(key: string): Promise<Entity> {
await this._init();
const fullPath = getWebdavPath(key, this.remoteBaseDir);
return await this._statFromRoot(fullPath);
}
async _statFromRoot(key: string): Promise<Entity> {
const res = (await this.client.stat(key, {
details: false,
})) as FileStat;
return fromWebdavItemToEntity(res, this.remoteBaseDir);
}
async mkdir(key: string, mtime?: number, ctime?: number): Promise<Entity> {
if (!key.endsWith("/")) {
throw Error(`you should not call mkdir on ${key}`);
}
await this._init();
const uploadFile = getWebdavPath(key, this.remoteBaseDir);
return await this._mkdirFromRoot(uploadFile, mtime, ctime);
}
async _mkdirFromRoot(
key: string,
mtime?: number,
ctime?: number
): Promise<Entity> {
await this.client.createDirectory(key, {
recursive: true,
});
return await this._statFromRoot(key);
}
async writeFile(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity> {
if (key.endsWith("/")) {
throw Error(`you should not call writeFile on ${key}`);
}
await this._init();
const uploadFile = getWebdavPath(key, this.remoteBaseDir);
return await this._writeFileFromRoot(uploadFile, content, mtime, ctime);
}
async _writeFileFromRoot(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity> {
await this.client.putFileContents(key, content, {
overwrite: true,
onUploadProgress: (progress: any) => {
console.info(`Uploaded ${progress.loaded} bytes of ${progress.total}`);
},
});
return await this._statFromRoot(key);
}
async readFile(key: string): Promise<ArrayBuffer> {
if (key.endsWith("/")) {
throw Error(`you should not call readFile on ${key}`);
}
await this._init();
const downloadFile = getWebdavPath(key, this.remoteBaseDir);
return await this._readFileFromRoot(downloadFile);
}
async _readFileFromRoot(key: string): Promise<ArrayBuffer> {
const buff = (await this.client.getFileContents(key)) as BufferLike;
if (buff instanceof ArrayBuffer) {
return buff;
} else if (buff instanceof Buffer) {
return bufferToArrayBuffer(buff);
}
throw Error(`unexpected file content result with type ${typeof buff}`);
}
async rm(key: string): Promise<void> {
if (key === "/") {
return;
}
await this._init();
try {
const remoteFileName = getWebdavPath(key, this.remoteBaseDir);
await this.client.deleteFile(remoteFileName);
// console.info(`delete ${remoteFileName} succeeded`);
} catch (err) {
console.error("some error while deleting");
console.error(err);
}
}
async checkConnect(callbackFunc?: any): Promise<boolean> {
if (
!(
this.webdavConfig.address.startsWith("http://") ||
this.webdavConfig.address.startsWith("https://")
)
) {
const err =
"Error: the url should start with http(s):// but it does not!";
console.error(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
}
return false;
}
try {
await this._init();
const results = await this._statFromRoot(`/${this.remoteBaseDir}/`);
if (results === undefined) {
const err = "results is undefined";
console.error(err);
callbackFunc?.(err);
return false;
}
return true;
} catch (err) {
console.error(err);
callbackFunc?.(err);
return false;
}
}
async getUserDisplayName(): Promise<string> {
throw new Error("Method not implemented.");
}
async revokeAuth() {
throw new Error("Method not implemented.");
}
}
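// A hypothetical caller, to show the uniform FakeFs surface that
// FakeFsOnedrive, FakeFsS3 and FakeFsWebdav all implement:
async function roundTrip(fs: FakeFs, key: string, data: ArrayBuffer) {
  const now = Date.now();
  await fs.writeFile(key, data, now, now);
  const entity = await fs.stat(key);
  console.debug(`wrote ${entity.key}, ${entity.size} bytes`);
  return await fs.readFile(key);
}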

View File

@@ -7,7 +7,7 @@ import {
RemotelySavePluginSettings,
QRExportType,
} from "./baseTypes";
import { getShrinkedSettings } from "./remoteForOnedrive";
import { getShrinkedSettings } from "./fsOnedrive";
export const exportQrCodeUri = async (
settings: RemotelySavePluginSettings,

View File

@@ -133,13 +133,10 @@
"settings_runoncestartup_1sec": "sync once after 1 second of start up",
"settings_runoncestartup_10sec": "sync once after 10 seconds of start up",
"settings_runoncestartup_30sec": "sync once after 30 seconds of start up",
"settings_saverun": "Sync On Save (experimental)",
"settings_saverun_desc": "A sync will be triggered if a file save action happened within a few seconds. Please pay attention that syncing is potentially a heavy action and battery may be impacted. (May need to reload the plugin or restart Obsidian after changing)",
"settings_saverun_notset": "(not set)",
"settings_saverun_1sec": "check every 1 second",
"settings_saverun_5sec": "check every 5 seconds",
"settings_saverun_10sec": "check every 10 seconds (recommended)",
"settings_saverun_1min": "check every 1 minute",
"settings_synconsave": "Sync On Save (experimental)",
"settings_synconsave_desc": "If you change your file, the plugin tries to trigger a sync.",
"settings_synconsave_disable": "Disable (default)",
"settings_synconsave_enable": "Enable",
"settings_skiplargefiles": "Skip Large Files",
"settings_skiplargefiles_desc": "Skip files with sizes larger than the threshold. Here 1 MB = 10^6 bytes.",
"settings_skiplargefiles_notset": "(not set)",

View File

@@ -132,13 +132,10 @@
"settings_runoncestartup_1sec": "启动后第 1 秒运行一次",
"settings_runoncestartup_10sec": "启动后第 10 秒运行一次",
"settings_runoncestartup_30sec": "启动后第 30 秒运行一次",
"settings_saverun": "保存时同步(实验性质)",
"settings_saverun_desc": "插件如果检查到当前文件在最近一段时间有修改保存过,则尝试同步。请注意,同步是一个很重的操作,因此会影响到耗电量。(修改设置后可能需要重载插件或重启。)",
"settings_saverun_notset": "(不设置)",
"settings_saverun_1sec": "隔 1 秒检查一次",
"settings_saverun_5sec": "隔 5 秒检查一次",
"settings_saverun_10sec": "隔 10 秒检查一次(推荐)",
"settings_saverun_1min": "隔 1 分钟检查一次",
"settings_synconsave": "保存时同步(实验性质)",
"settings_synconsave_desc": "插件如果检查到当前文件在最近一段时间有修改保存过,则尝试同步。请注意,同步是一个很重的操作,因此会影响到耗电量。(修改设置后可能需要重载插件或重启。)",
"settings_synconsave_disable": "关闭(默认)",
"settings_synconsave_enable": "开启",
"settings_skiplargefiles": "跳过大文件",
"settings_skiplargefiles_desc": "跳过大于某一个阈值的文件。这里 1 MB = 10^6 bytes。",
"settings_skiplargefiles_notset": "(不设置)",

View File

@@ -131,13 +131,10 @@
"settings_runoncestartup_1sec": "啟動後第 1 秒執行一次",
"settings_runoncestartup_10sec": "啟動後第 10 秒執行一次",
"settings_runoncestartup_30sec": "啟動後第 30 秒執行一次",
"settings_saverun": "儲存時同步(實驗性質)",
"settings_saverun_desc": "外掛如果檢查到當前檔案在最近一段時間有修改儲存過,則嘗試同步。請注意,同步是一個很重的操作,因此會影響到耗電量。(修改設定後可能需要過載外掛或重啟。)",
"settings_saverun_notset": "(不設定)",
"settings_saverun_1sec": "隔 1 秒檢查一次",
"settings_saverun_5sec": "隔 5 秒檢查一次",
"settings_saverun_10sec": "隔 10 秒檢查一次(推薦)",
"settings_saverun_1min": "隔 1 分鐘檢查一次",
"settings_synconsave": "儲存時同步(實驗性質)",
"settings_synconsave_desc": "外掛如果檢查到當前檔案在最近一段時間有修改儲存過,則嘗試同步。請注意,同步是一個很重的操作,因此會影響到耗電量。(修改設定後可能需要過載外掛或重啟。)",
"settings_synconsave_disable": "關閉(預設)",
"settings_synconsave_enable": "開啟",
"settings_skiplargefiles": "跳過大檔案",
"settings_skiplargefiles_desc": "跳過大於某一個閾值的檔案。這裡 1 MB = 10^6 bytes。",
"settings_skiplargefiles_notset": "(不設定)",

View File

@@ -1,76 +0,0 @@
import { TFile, TFolder, type Vault } from "obsidian";
import type { Entity, MixedEntity } from "./baseTypes";
import { listFilesInObsFolder } from "./obsFolderLister";
import { Profiler } from "./profiler";
export const getLocalEntityList = async (
vault: Vault,
syncConfigDir: boolean,
configDir: string,
pluginID: string,
profiler: Profiler
) => {
profiler.addIndent();
profiler.insert("enter getLocalEntityList");
const local: Entity[] = [];
const localTAbstractFiles = vault.getAllLoadedFiles();
profiler.insert("finish getting getAllLoadedFiles");
for (const entry of localTAbstractFiles) {
let r = {} as Entity;
let key = entry.path;
if (entry.path === "/") {
// ignore
continue;
} else if (entry instanceof TFile) {
let mtimeLocal: number | undefined = entry.stat.mtime;
if (mtimeLocal <= 0) {
mtimeLocal = entry.stat.ctime;
}
if (mtimeLocal === 0) {
mtimeLocal = undefined;
}
if (mtimeLocal === undefined) {
throw Error(
`Your file has last modified time 0: ${key}, don't know how to deal with it`
);
}
r = {
key: entry.path, // local always unencrypted
keyRaw: entry.path,
mtimeCli: mtimeLocal,
mtimeSvr: mtimeLocal,
size: entry.stat.size, // local always unencrypted
sizeRaw: entry.stat.size,
};
} else if (entry instanceof TFolder) {
key = `${entry.path}/`;
r = {
key: key,
keyRaw: key,
size: 0,
sizeRaw: 0,
};
} else {
throw Error(`unexpected ${entry}`);
}
local.push(r);
}
profiler.insert("finish transforming getAllLoadedFiles");
if (syncConfigDir) {
profiler.insert("into syncConfigDir");
const syncFiles = await listFilesInObsFolder(configDir, vault, pluginID);
for (const f of syncFiles) {
local.push(f);
}
profiler.insert("finish syncConfigDir");
}
profiler.insert("finish getLocalEntityList");
profiler.removeIndent();
return local;
};

View File

@@ -3,12 +3,10 @@ import { extendPrototype } from "localforage-getitems";
extendPrototype(localforage);
export type LocalForage = typeof localforage;
import { nanoid } from "nanoid";
import { requireApiVersion, TAbstractFile, TFile, TFolder } from "obsidian";
import { API_VER_STAT_FOLDER } from "./baseTypes";
import type { Entity, MixedEntity, SUPPORTED_SERVICES_TYPE } from "./baseTypes";
import type { SyncPlanType } from "./sync";
import { statFix, toText, unixTimeToStr } from "./misc";
import { unixTimeToStr } from "./misc";
const DB_VERSION_NUMBER_IN_HISTORY = [20211114, 20220108, 20220326, 20240220];
export const DEFAULT_DB_VERSION_NUMBER: number = 20240220;

View File

@@ -7,7 +7,6 @@ import {
setIcon,
FileSystemAdapter,
Platform,
requestUrl,
requireApiVersion,
Events,
} from "obsidian";
@@ -26,7 +25,6 @@ import {
} from "./baseTypes";
import { importQrCodeUri } from "./importExport";
import {
insertSyncPlanRecordByVault,
prepareDBs,
InternalDBs,
clearExpiredSyncPlanRecords,
@@ -34,43 +32,35 @@ import {
clearAllLoggerOutputRecords,
upsertLastSuccessSyncTimeByVault,
getLastSuccessSyncTimeByVault,
getAllPrevSyncRecordsByVaultAndProfile,
insertProfilerResultByVault,
} from "./localdb";
import { RemoteClient } from "./remote";
import {
DEFAULT_DROPBOX_CONFIG,
getAuthUrlAndVerifier as getAuthUrlAndVerifierDropbox,
sendAuthReq as sendAuthReqDropbox,
setConfigBySuccessfullAuthInplace as setConfigBySuccessfullAuthInplaceDropbox,
} from "./remoteForDropbox";
} from "./fsDropbox";
import {
AccessCodeResponseSuccessfulType,
DEFAULT_ONEDRIVE_CONFIG,
sendAuthReq as sendAuthReqOnedrive,
setConfigBySuccessfullAuthInplace as setConfigBySuccessfullAuthInplaceOnedrive,
} from "./remoteForOnedrive";
import { DEFAULT_S3_CONFIG } from "./remoteForS3";
import { DEFAULT_WEBDAV_CONFIG } from "./remoteForWebdav";
} from "./fsOnedrive";
import { DEFAULT_S3_CONFIG } from "./fsS3";
import { DEFAULT_WEBDAV_CONFIG } from "./fsWebdav";
import { RemotelySaveSettingTab } from "./settings";
import {
doActualSync,
ensembleMixedEnties,
getSyncPlanInplace,
isPasswordOk,
SyncStatusType,
} from "./sync";
import { messyConfigToNormal, normalConfigToMessy } from "./configPersist";
import { getLocalEntityList } from "./local";
import { I18n } from "./i18n";
import type { LangType, LangTypeAndAuto, TransItemType } from "./i18n";
import type { LangTypeAndAuto, TransItemType } from "./i18n";
import { SyncAlgoV3Modal } from "./syncAlgoV3Notice";
import AggregateError from "aggregate-error";
import { exportVaultSyncPlansToFiles } from "./debugMode";
import { changeMobileStatusBar, compareVersion } from "./misc";
import { Cipher } from "./encryptUnified";
import { changeMobileStatusBar } from "./misc";
import { Profiler } from "./profiler";
import { FakeFsLocal } from "./fsLocal";
import { FakeFsEncrypt } from "./fsEncrypt";
import { syncer } from "./sync";
import { getClient } from "./fsGetter";
import throttle from "lodash/throttle";
const DEFAULT_SETTINGS: RemotelySavePluginSettings = {
s3: DEFAULT_S3_CONFIG,
@@ -141,7 +131,8 @@ const getIconSvg = () => {
export default class RemotelySavePlugin extends Plugin {
settings!: RemotelySavePluginSettings;
db!: InternalDBs;
syncStatus!: SyncStatusType;
isSyncing!: boolean;
hasPendingSyncOnSave!: boolean;
statusBarElement!: HTMLSpanElement;
oauth2Info!: OAuth2Info;
currLogLevel!: string;
@@ -156,7 +147,25 @@ export default class RemotelySavePlugin extends Plugin {
appContainerObserver?: MutationObserver;
async syncRun(triggerSource: SyncTriggerSourceType = "manual") {
const profiler = new Profiler("start of syncRun");
const profiler = new Profiler();
const fsLocal = new FakeFsLocal(
this.app.vault,
this.settings.syncConfigDir ?? false,
this.app.vault.configDir,
this.manifest.id,
profiler,
this.settings.deleteToWhere ?? "system"
);
const fsRemote = getClient(
this.settings,
this.app.vault.getName(),
async () => await this.saveSettings()
);
const fsEncrypt = new FakeFsEncrypt(
fsRemote,
this.settings.password ?? "",
this.settings.encryptionMethod ?? "rclone-base64"
);
const t = (x: TransItemType, vars?: any) => {
return this.i18n.t(x, vars);
@@ -164,333 +173,241 @@ export default class RemotelySavePlugin extends Plugin {
const profileID = this.getCurrProfileID();
const getNotice = (x: string, timeout?: number) => {
// only show notices in manual mode
// no notice in auto mode
if (triggerSource === "manual" || triggerSource === "dry") {
new Notice(x, timeout);
const getProtectError = (
protectModifyPercentage: number,
realModifyDeleteCount: number,
allFilesCount: number
) => {
const percent = ((100 * realModifyDeleteCount) / allFilesCount).toFixed(
1
);
const res = t("syncrun_abort_protectmodifypercentage", {
protectModifyPercentage,
realModifyDeleteCount,
allFilesCount,
percent,
});
return res;
};
const getNotice = (
s: SyncTriggerSourceType,
msg: string,
timeout?: number
) => {
if (s === "manual" || s === "dry") {
new Notice(msg, timeout);
}
};
if (this.syncStatus !== "idle") {
// really, users don't want to see this in auto mode
// so we use getNotice to avoid unnecessary pop-ups
const notifyFunc = async (s: SyncTriggerSourceType, step: number) => {
switch (step) {
case 0:
if (s === "dry") {
if (this.settings.currLogLevel === "info") {
getNotice(s, t("syncrun_shortstep0"));
} else {
getNotice(s, t("syncrun_step0"));
}
}
break;
case 1:
if (this.settings.currLogLevel === "info") {
getNotice(
s,
t("syncrun_shortstep1", {
serviceType: this.settings.serviceType,
})
);
} else {
getNotice(
s,
t("syncrun_step1", {
serviceType: this.settings.serviceType,
})
);
}
break;
case 2:
if (this.settings.currLogLevel === "info") {
// pass
} else {
getNotice(s, t("syncrun_step2"));
}
break;
case 3:
if (this.settings.currLogLevel === "info") {
// pass
} else {
getNotice(s, t("syncrun_step3"));
}
break;
case 4:
if (this.settings.currLogLevel === "info") {
// pass
} else {
getNotice(s, t("syncrun_step4"));
}
break;
case 5:
if (this.settings.currLogLevel === "info") {
// pass
} else {
getNotice(s, t("syncrun_step5"));
}
break;
case 6:
if (this.settings.currLogLevel === "info") {
// pass
} else {
getNotice(s, t("syncrun_step6"));
}
break;
case 7:
if (s === "dry") {
if (this.settings.currLogLevel === "info") {
getNotice(s, t("syncrun_shortstep2skip"));
} else {
getNotice(s, t("syncrun_step7skip"));
}
} else {
if (this.settings.currLogLevel === "info") {
// pass
} else {
getNotice(s, t("syncrun_step7"));
}
}
break;
case 8:
if (this.settings.currLogLevel === "info") {
getNotice(s, t("syncrun_shortstep2"));
} else {
getNotice(s, t("syncrun_step8"));
}
break;
default:
throw Error(`unknown step=${step} for showing notice`);
break;
}
};
const errNotifyFunc = async (s: SyncTriggerSourceType, error: Error) => {
console.error(error);
if (error instanceof AggregateError) {
for (const e of error.errors) {
getNotice(s, e.message, 10 * 1000);
}
} else {
getNotice(s, error?.message ?? "error while sync", 10 * 1000);
}
};
const ribboonFunc = async (s: SyncTriggerSourceType, step: number) => {
if (step === 1) {
if (this.syncRibbon !== undefined) {
setIcon(this.syncRibbon, iconNameSyncRunning);
this.syncRibbon.setAttribute(
"aria-label",
t("syncrun_syncingribbon", {
pluginName: this.manifest.name,
triggerSource: s,
})
);
}
} else if (step === 8) {
// last step
if (this.syncRibbon !== undefined) {
setIcon(this.syncRibbon, iconNameSyncWait);
let originLabel = `${this.manifest.name}`;
this.syncRibbon.setAttribute("aria-label", originLabel);
}
}
};
const statusBarFunc = async (s: SyncTriggerSourceType, step: number) => {
if (step === 1) {
// change status to "syncing..." on statusbar
this.updateLastSuccessSyncMsg(-1);
} else if (step === 8) {
const lastSuccessSyncMillis = Date.now();
await upsertLastSuccessSyncTimeByVault(
this.db,
this.vaultRandomID,
lastSuccessSyncMillis
);
this.updateLastSuccessSyncMsg(lastSuccessSyncMillis);
}
};
const markIsSyncingFunc = async (isSyncing: boolean) => {
this.isSyncing = isSyncing;
};
const callbackSyncProcess = async (
realCounter: number,
realTotalCount: number,
pathName: string,
decision: string
) => {
this.setCurrSyncMsg(
realCounter,
realTotalCount,
pathName,
decision,
triggerSource
);
};
if (this.isSyncing) {
getNotice(
triggerSource,
t("syncrun_alreadyrunning", {
pluginName: this.manifest.name,
syncStatus: this.syncStatus,
syncStatus: "running",
newTriggerSource: triggerSource,
})
);
if (this.currSyncMsg !== undefined && this.currSyncMsg !== "") {
getNotice(this.currSyncMsg);
getNotice(triggerSource, this.currSyncMsg);
}
return;
}
let originLabel = `${this.manifest.name}`;
if (this.syncRibbon !== undefined) {
originLabel = this.syncRibbon.getAttribute("aria-label") as string;
}
try {
console.info(
`${
this.manifest.id
}-${Date.now()}: start sync, triggerSource=${triggerSource}`
);
if (this.syncRibbon !== undefined) {
setIcon(this.syncRibbon, iconNameSyncRunning);
this.syncRibbon.setAttribute(
"aria-label",
t("syncrun_syncingribbon", {
pluginName: this.manifest.name,
triggerSource: triggerSource,
})
);
}
if (triggerSource === "dry") {
if (this.settings.currLogLevel === "info") {
getNotice(t("syncrun_shortstep0"));
} else {
getNotice(t("syncrun_step0"));
}
}
// change status to "syncing..." on statusbar
if (this.statusBarElement !== undefined) {
this.updateLastSuccessSyncMsg(-1);
}
//console.info(`huh ${this.settings.password}`)
if (this.settings.currLogLevel === "info") {
getNotice(
t("syncrun_shortstep1", {
serviceType: this.settings.serviceType,
})
);
} else {
getNotice(
t("syncrun_step1", {
serviceType: this.settings.serviceType,
})
);
}
this.syncStatus = "preparing";
profiler.insert("finish step1");
if (this.settings.currLogLevel === "info") {
// pass
} else {
getNotice(t("syncrun_step2"));
}
this.syncStatus = "getting_remote_files_list";
const self = this;
const client = new RemoteClient(
this.settings.serviceType,
this.settings.s3,
this.settings.webdav,
this.settings.dropbox,
this.settings.onedrive,
this.app.vault.getName(),
() => self.saveSettings(),
profiler
);
const remoteEntityList = await client.listAllFromRemote();
console.debug("remoteEntityList:");
console.debug(remoteEntityList);
profiler.insert("finish step2 (listing remote)");
if (this.settings.currLogLevel === "info") {
// pass
} else {
getNotice(t("syncrun_step3"));
}
this.syncStatus = "checking_password";
const cipher = new Cipher(
this.settings.password,
this.settings.encryptionMethod ?? "unknown"
);
const passwordCheckResult = await isPasswordOk(remoteEntityList, cipher);
if (!passwordCheckResult.ok) {
getNotice(t("syncrun_passworderr"));
throw Error(passwordCheckResult.reason);
}
profiler.insert("finish step3 (checking password)");
if (this.settings.currLogLevel === "info") {
// pass
} else {
getNotice(t("syncrun_step4"));
}
this.syncStatus = "getting_local_meta";
const localEntityList = await getLocalEntityList(
this.app.vault,
this.settings.syncConfigDir ?? false,
this.app.vault.configDir,
this.manifest.id,
profiler
);
console.debug("localEntityList:");
console.debug(localEntityList);
profiler.insert("finish step4 (local meta)");
if (this.settings.currLogLevel === "info") {
// pass
} else {
getNotice(t("syncrun_step5"));
}
this.syncStatus = "getting_local_prev_sync";
const prevSyncEntityList = await getAllPrevSyncRecordsByVaultAndProfile(
this.db,
this.vaultRandomID,
profileID
);
console.debug("prevSyncEntityList:");
console.debug(prevSyncEntityList);
profiler.insert("finish step5 (prev sync)");
if (this.settings.currLogLevel === "info") {
// pass
} else {
getNotice(t("syncrun_step6"));
}
this.syncStatus = "generating_plan";
let mixedEntityMappings = await ensembleMixedEnties(
localEntityList,
prevSyncEntityList,
remoteEntityList,
this.settings.syncConfigDir ?? false,
this.app.vault.configDir,
this.settings.syncUnderscoreItems ?? false,
this.settings.ignorePaths ?? [],
cipher,
this.settings.serviceType,
profiler
);
profiler.insert("finish building partial mixedEntity");
mixedEntityMappings = await getSyncPlanInplace(
mixedEntityMappings,
this.settings.howToCleanEmptyFolder ?? "skip",
this.settings.skipSizeLargerThan ?? -1,
this.settings.conflictAction ?? "keep_newer",
this.settings.syncDirection ?? "bidirectional",
profiler
);
console.info(`mixedEntityMappings:`);
console.info(mixedEntityMappings); // for debugging
profiler.insert("finish building full sync plan");
await insertSyncPlanRecordByVault(
this.db,
mixedEntityMappings,
this.vaultRandomID,
client.serviceType
);
profiler.insert("finish writing sync plan");
profiler.insert("finish step6 (plan)");
// The operations above are almost read only and kind of safe.
// The operations below begins to write or delete (!!!) something.
if (triggerSource !== "dry") {
if (this.settings.currLogLevel === "info") {
// pass
} else {
getNotice(t("syncrun_step7"));
}
this.syncStatus = "syncing";
await doActualSync(
mixedEntityMappings,
client,
this.vaultRandomID,
profileID,
this.app.vault,
cipher,
this.settings.concurrency ?? 5,
(key: string) => self.trash(key),
this.settings.protectModifyPercentage ?? 50,
(
protectModifyPercentage: number,
realModifyDeleteCount: number,
allFilesCount: number
) => {
const percent = (
(100 * realModifyDeleteCount) /
allFilesCount
).toFixed(1);
const res = t("syncrun_abort_protectmodifypercentage", {
protectModifyPercentage,
realModifyDeleteCount,
allFilesCount,
percent,
});
return res;
},
(
realCounter: number,
realTotalCount: number,
pathName: string,
decision: string
) =>
self.setCurrSyncMsg(
realCounter,
realTotalCount,
pathName,
decision,
triggerSource
),
this.db,
profiler
);
} else {
this.syncStatus = "syncing";
if (this.settings.currLogLevel === "info") {
getNotice(t("syncrun_shortstep2skip"));
} else {
getNotice(t("syncrun_step7skip"));
}
}
cipher.closeResources();
profiler.insert("finish step7 (actual sync)");
if (this.settings.currLogLevel === "info") {
getNotice(t("syncrun_shortstep2"));
} else {
getNotice(t("syncrun_step8"));
}
this.syncStatus = "finish";
this.syncStatus = "idle";
profiler.insert("finish step8");
const lastSuccessSyncMillis = Date.now();
await upsertLastSuccessSyncTimeByVault(
this.db,
this.vaultRandomID,
lastSuccessSyncMillis
);
if (this.syncRibbon !== undefined) {
setIcon(this.syncRibbon, iconNameSyncWait);
this.syncRibbon.setAttribute("aria-label", originLabel);
}
if (this.statusBarElement !== undefined) {
this.updateLastSuccessSyncMsg(lastSuccessSyncMillis);
}
this.syncEvent?.trigger("SYNC_DONE");
console.info(
`${
this.manifest.id
}-${Date.now()}: finish sync, triggerSource=${triggerSource}`
);
} catch (error: any) {
profiler.insert("start error branch");
const msg = t("syncrun_abort", {
manifestID: this.manifest.id,
theDate: `${Date.now()}`,
triggerSource: triggerSource,
syncStatus: this.syncStatus,
});
console.error(msg);
console.error(error);
getNotice(msg, 10 * 1000);
if (error instanceof AggregateError) {
for (const e of error.errors) {
getNotice(e.message, 10 * 1000);
}
} else {
getNotice(error?.message ?? "error while sync", 10 * 1000);
}
this.syncStatus = "idle";
if (this.syncRibbon !== undefined) {
setIcon(this.syncRibbon, iconNameSyncWait);
this.syncRibbon.setAttribute("aria-label", originLabel);
}
profiler.insert("finish error branch");
}
profiler.insert("finish syncRun");
console.debug(profiler.toString());
insertProfilerResultByVault(
this.db,
profiler.toString(),
this.vaultRandomID,
this.settings.serviceType
);
await syncer(
fsLocal,
fsRemote,
fsEncrypt,
profiler,
this.db,
triggerSource,
profileID,
this.vaultRandomID,
this.app.vault.configDir,
this.settings,
getProtectError,
markIsSyncingFunc,
notifyFunc,
errNotifyFunc,
ribboonFunc,
statusBarFunc,
callbackSyncProcess
);
fsEncrypt.closeResources();
profiler.clear();
this.syncEvent?.trigger("SYNC_DONE");
}
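// A sketch of the callback contract wired above, assuming syncer() reports
// progress only as (triggerSource, step) pairs with steps 0..8 numbered as in
// notifyFunc / ribboonFunc / statusBarFunc:
//   const logSteps = async (s: SyncTriggerSourceType, step: number) => {
//     console.debug(`sync(${s}) reached step ${step}`); // 1 = started, 8 = finished
//   };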
async onload() {
@ -511,6 +428,8 @@ export default class RemotelySavePlugin extends Plugin {
}; // init
this.currSyncMsg = "";
this.isSyncing = false;
this.hasPendingSyncOnSave = false;
this.syncEvent = new Events();
@ -561,8 +480,6 @@ export default class RemotelySavePlugin extends Plugin {
// must AFTER preparing DB
this.enableAutoClearSyncPlanHist();
this.syncStatus = "idle";
this.registerObsidianProtocolHandler(COMMAND_URI, async (inputParams) => {
// console.debug(inputParams);
const parsed = importQrCodeUri(inputParams, this.app.vault.getName());
@ -633,17 +550,12 @@ export default class RemotelySavePlugin extends Plugin {
() => self.saveSettings()
);
const client = new RemoteClient(
"dropbox",
undefined,
undefined,
this.settings.dropbox,
undefined,
const client = getClient(
this.settings,
this.app.vault.getName(),
() => self.saveSettings()
);
const username = await client.getUser();
const username = await client.getUserDisplayName();
this.settings.dropbox.username = username;
await this.saveSettings();
@ -729,16 +641,12 @@ export default class RemotelySavePlugin extends Plugin {
() => self.saveSettings()
);
const client = new RemoteClient(
"onedrive",
undefined,
undefined,
undefined,
this.settings.onedrive,
const client = getClient(
this.settings,
this.app.vault.getName(),
() => self.saveSettings()
);
this.settings.onedrive.username = await client.getUser();
this.settings.onedrive.username = await client.getUserDisplayName();
await this.saveSettings();
this.oauth2Info.verifier = ""; // reset it
@ -879,7 +787,7 @@ export default class RemotelySavePlugin extends Plugin {
} else {
this.enableAutoSyncIfSet();
this.enableInitSyncIfSet();
this.enableSyncOnSaveIfSet();
this.toggleSyncOnSaveIfSet();
}
// compare versions and read new versions
@ -1191,75 +1099,89 @@ export default class RemotelySavePlugin extends Plugin {
}
}
enableSyncOnSaveIfSet() {
async _checkCurrFileModified(caller: "SYNC" | "FILE_CHANGES") {
console.debug(`inside checkCurrFileModified`);
const currentFile = this.app.workspace.getActiveFile();
if (currentFile) {
console.debug(`we have currentFile=${currentFile.path}`);
// get the last modified time of the current file
// if it has modified after lastSuccessSync
// then schedule a run for syncOnSaveAfterMilliseconds after it was modified
const lastModified = currentFile.stat.mtime;
const lastSuccessSyncMillis = await getLastSuccessSyncTimeByVault(
this.db,
this.vaultRandomID
);
console.debug(
`lastModified=${lastModified}, lastSuccessSyncMillis=${lastSuccessSyncMillis}`
);
if (
caller === "SYNC" ||
(caller === "FILE_CHANGES" && lastModified > lastSuccessSyncMillis)
) {
console.debug(
`so lastModified > lastSuccessSyncMillis, or we were called right after a sync`
);
console.debug(
`caller=${caller}, isSyncing=${this.isSyncing}, hasPendingSyncOnSave=${this.hasPendingSyncOnSave}`
);
if (this.isSyncing) {
this.hasPendingSyncOnSave = true;
// wait for next event
return;
} else {
if (this.hasPendingSyncOnSave || caller === "FILE_CHANGES") {
this.hasPendingSyncOnSave = false;
await this.syncRun("auto_sync_on_save");
}
return;
}
}
} else {
console.debug(`no currentFile here`);
}
}
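// The two flags above form a small state machine; a standalone sketch of the
// decision after the mtime gate (names hypothetical, logic mirrors the branches above):
//   const nextAction = (isSyncing: boolean, pending: boolean, caller: "SYNC" | "FILE_CHANGES") => {
//     if (isSyncing) return "mark-pending"; // remember it, flush on SYNC_DONE
//     if (pending || caller === "FILE_CHANGES") return "run-sync";
//     return "noop";
//   };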
_syncOnSaveEvent1 = () => {
this._checkCurrFileModified("SYNC");
};
_syncOnSaveEvent2 = throttle(
async () => {
await this._checkCurrFileModified("FILE_CHANGES");
},
1000 * 3,
{
leading: false,
trailing: true,
}
);
toggleSyncOnSaveIfSet() {
if (
this.settings.syncOnSaveAfterMilliseconds !== undefined &&
this.settings.syncOnSaveAfterMilliseconds !== null &&
this.settings.syncOnSaveAfterMilliseconds > 0
) {
let runScheduled = false;
let needToRunAgain = false;
const scheduleSyncOnSave = (scheduleTimeFromNow: number) => {
console.info(
`schedule a run for ${scheduleTimeFromNow} milliseconds later`
);
runScheduled = true;
setTimeout(() => {
this.syncRun("auto_sync_on_save");
runScheduled = false;
}, scheduleTimeFromNow);
};
const checkCurrFileModified = async (caller: "SYNC" | "FILE_CHANGES") => {
const currentFile = this.app.workspace.getActiveFile();
if (currentFile) {
// get the last modified time of the current file
// if it has modified after lastSuccessSync
// then schedule a run for syncOnSaveAfterMilliseconds after it was modified
const lastModified = currentFile.stat.mtime;
const lastSuccessSyncMillis = await getLastSuccessSyncTimeByVault(
this.db,
this.vaultRandomID
);
if (
this.syncStatus === "idle" &&
lastModified > lastSuccessSyncMillis &&
!runScheduled
) {
scheduleSyncOnSave(this.settings!.syncOnSaveAfterMilliseconds!);
} else if (
this.syncStatus === "idle" &&
needToRunAgain &&
!runScheduled
) {
scheduleSyncOnSave(this.settings!.syncOnSaveAfterMilliseconds!);
needToRunAgain = false;
} else {
if (caller === "FILE_CHANGES") {
needToRunAgain = true;
}
}
}
};
this.app.workspace.onLayoutReady(() => {
// listen to sync done
this.registerEvent(
this.syncEvent?.on("SYNC_DONE", () => {
checkCurrFileModified("SYNC");
})!
this.syncEvent?.on("SYNC_DONE", this._syncOnSaveEvent1)!
);
// listen to current file save changes
this.registerEvent(
this.app.vault.on("modify", (x) => {
// console.debug(`event=modify! file=${x}`);
checkCurrFileModified("FILE_CHANGES");
})
);
this.registerEvent(this.app.vault.on("modify", this._syncOnSaveEvent2));
this.registerEvent(this.app.vault.on("create", this._syncOnSaveEvent2));
this.registerEvent(this.app.vault.on("delete", this._syncOnSaveEvent2));
});
} else {
this.syncEvent?.off("SYNC_DONE", this._syncOnSaveEvent1);
this.app.vault.off("modify", this._syncOnSaveEvent2);
this.app.vault.off("create", this._syncOnSaveEvent2);
this.app.vault.off("delete", this._syncOnSaveEvent2);
}
}
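// A minimal sketch of the lodash throttle options used by _syncOnSaveEvent2
// above: leading=false + trailing=true coalesces a burst of vault events into
// one trailing call per 3-second window.
//   const flush = throttle(() => console.debug("burst ended"), 3000, {
//     leading: false,
//     trailing: true,
//   });
//   flush(); flush(); flush(); // -> logs "burst ended" once, ~3s later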
@ -1276,7 +1198,7 @@ export default class RemotelySavePlugin extends Plugin {
await this.saveSettings();
}
async setCurrSyncMsg(
setCurrSyncMsg(
i: number,
totalCount: number,
pathName: string,

View File

@ -31,13 +31,6 @@ const isLikelyPluginSubFiles = (x: string) => {
return false;
};
export const isInsideObsFolder = (x: string, configDir: string) => {
if (!configDir.startsWith(".")) {
throw Error(`configDir should start with "." but we got ${configDir}`);
}
return x === configDir || x.startsWith(`${configDir}/`);
};
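// A quick sketch of the contract of the helper removed above:
console.assert(isInsideObsFolder(".obsidian/plugins/foo/main.js", ".obsidian")); // true
console.assert(!isInsideObsFolder("notes/a.md", ".obsidian")); // false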
export const listFilesInObsFolder = async (
configDir: string,
vault: Vault,

View File

@ -1,3 +1,5 @@
import { SUPPORTED_SERVICES_TYPE } from "./baseTypes";
import { InternalDBs, insertProfilerResultByVault } from "./localdb";
import { unixTimeToStr } from "./misc";
interface BreakPoint {
@ -79,4 +81,17 @@ export class Profiler {
return res;
}
async save(
db: InternalDBs,
vaultRandomID: string,
remoteType: SUPPORTED_SERVICES_TYPE
) {
await insertProfilerResultByVault(
db,
this.toString(),
vaultRandomID,
remoteType
);
}
}
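// A hedged usage sketch of the new save() helper, assuming a prepared
// InternalDBs instance and a vaultRandomID as used in main.ts:
const profiler = new Profiler();
profiler.insert("finish step1");
await profiler.save(db, vaultRandomID, "s3"); // persists profiler.toString()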

View File

@ -1,320 +0,0 @@
import { Vault } from "obsidian";
import type {
Entity,
DropboxConfig,
OnedriveConfig,
S3Config,
SUPPORTED_SERVICES_TYPE,
WebdavConfig,
UploadedType,
} from "./baseTypes";
import * as dropbox from "./remoteForDropbox";
import * as onedrive from "./remoteForOnedrive";
import * as s3 from "./remoteForS3";
import * as webdav from "./remoteForWebdav";
import { Cipher } from "./encryptUnified";
import { Profiler } from "./profiler";
export class RemoteClient {
readonly serviceType: SUPPORTED_SERVICES_TYPE;
readonly s3Config?: S3Config;
readonly webdavClient?: webdav.WrappedWebdavClient;
readonly webdavConfig?: WebdavConfig;
readonly dropboxClient?: dropbox.WrappedDropboxClient;
readonly dropboxConfig?: DropboxConfig;
readonly onedriveClient?: onedrive.WrappedOnedriveClient;
readonly onedriveConfig?: OnedriveConfig;
constructor(
serviceType: SUPPORTED_SERVICES_TYPE,
s3Config?: S3Config,
webdavConfig?: WebdavConfig,
dropboxConfig?: DropboxConfig,
onedriveConfig?: OnedriveConfig,
vaultName?: string,
saveUpdatedConfigFunc?: () => Promise<any>,
profiler?: Profiler
) {
this.serviceType = serviceType;
// the client may modify the config inplace,
// so we use a ref not copy of config here
if (serviceType === "s3") {
this.s3Config = s3Config;
} else if (serviceType === "webdav") {
if (vaultName === undefined || saveUpdatedConfigFunc === undefined) {
throw Error(
"remember to provide vault name and callback while init webdav client"
);
}
const remoteBaseDir = webdavConfig!.remoteBaseDir || vaultName;
this.webdavConfig = webdavConfig;
this.webdavClient = webdav.getWebdavClient(
this.webdavConfig!,
remoteBaseDir,
saveUpdatedConfigFunc
);
} else if (serviceType === "dropbox") {
if (vaultName === undefined || saveUpdatedConfigFunc === undefined) {
throw Error(
"remember to provide vault name and callback while init dropbox client"
);
}
const remoteBaseDir = dropboxConfig!.remoteBaseDir || vaultName;
this.dropboxConfig = dropboxConfig;
this.dropboxClient = dropbox.getDropboxClient(
this.dropboxConfig!,
remoteBaseDir,
saveUpdatedConfigFunc
);
} else if (serviceType === "onedrive") {
if (vaultName === undefined || saveUpdatedConfigFunc === undefined) {
throw Error(
"remember to provide vault name and callback while init onedrive client"
);
}
const remoteBaseDir = onedriveConfig!.remoteBaseDir || vaultName;
this.onedriveConfig = onedriveConfig;
this.onedriveClient = onedrive.getOnedriveClient(
this.onedriveConfig!,
remoteBaseDir,
saveUpdatedConfigFunc
);
} else {
throw Error(`not supported service type ${this.serviceType}`);
}
}
getRemoteMeta = async (fileOrFolderPath: string) => {
if (this.serviceType === "s3") {
return await s3.getRemoteMeta(
s3.getS3Client(this.s3Config!),
this.s3Config!,
fileOrFolderPath
);
} else if (this.serviceType === "webdav") {
return await webdav.getRemoteMeta(this.webdavClient!, fileOrFolderPath);
} else if (this.serviceType === "dropbox") {
return await dropbox.getRemoteMeta(this.dropboxClient!, fileOrFolderPath);
} else if (this.serviceType === "onedrive") {
return await onedrive.getRemoteMeta(
this.onedriveClient!,
fileOrFolderPath
);
} else {
throw Error(`not supported service type ${this.serviceType}`);
}
};
uploadToRemote = async (
fileOrFolderPath: string,
vault: Vault | undefined,
isRecursively: boolean,
cipher: Cipher,
remoteEncryptedKey: string = "",
foldersCreatedBefore: Set<string> | undefined = undefined,
uploadRaw: boolean = false,
rawContent: string | ArrayBuffer = ""
): Promise<UploadedType> => {
if (this.serviceType === "s3") {
return await s3.uploadToRemote(
s3.getS3Client(this.s3Config!),
this.s3Config!,
fileOrFolderPath,
vault,
isRecursively,
cipher,
remoteEncryptedKey,
uploadRaw,
rawContent
);
} else if (this.serviceType === "webdav") {
return await webdav.uploadToRemote(
this.webdavClient!,
fileOrFolderPath,
vault,
isRecursively,
cipher,
remoteEncryptedKey,
uploadRaw,
rawContent
);
} else if (this.serviceType === "dropbox") {
return await dropbox.uploadToRemote(
this.dropboxClient!,
fileOrFolderPath,
vault,
isRecursively,
cipher,
remoteEncryptedKey,
foldersCreatedBefore,
uploadRaw,
rawContent
);
} else if (this.serviceType === "onedrive") {
return await onedrive.uploadToRemote(
this.onedriveClient!,
fileOrFolderPath,
vault,
isRecursively,
cipher,
remoteEncryptedKey,
foldersCreatedBefore,
uploadRaw,
rawContent
);
} else {
throw Error(`not supported service type ${this.serviceType}`);
}
};
listAllFromRemote = async (): Promise<Entity[]> => {
if (this.serviceType === "s3") {
return await s3.listAllFromRemote(
s3.getS3Client(this.s3Config!),
this.s3Config!
);
} else if (this.serviceType === "webdav") {
return await webdav.listAllFromRemote(this.webdavClient!);
} else if (this.serviceType === "dropbox") {
return await dropbox.listAllFromRemote(this.dropboxClient!);
} else if (this.serviceType === "onedrive") {
return await onedrive.listAllFromRemote(this.onedriveClient!);
} else {
throw Error(`not supported service type ${this.serviceType}`);
}
};
downloadFromRemote = async (
fileOrFolderPath: string,
vault: Vault,
mtime: number,
cipher: Cipher,
remoteEncryptedKey: string = "",
skipSaving: boolean = false
) => {
if (this.serviceType === "s3") {
return await s3.downloadFromRemote(
s3.getS3Client(this.s3Config!),
this.s3Config!,
fileOrFolderPath,
vault,
mtime,
cipher,
remoteEncryptedKey,
skipSaving
);
} else if (this.serviceType === "webdav") {
return await webdav.downloadFromRemote(
this.webdavClient!,
fileOrFolderPath,
vault,
mtime,
cipher,
remoteEncryptedKey,
skipSaving
);
} else if (this.serviceType === "dropbox") {
return await dropbox.downloadFromRemote(
this.dropboxClient!,
fileOrFolderPath,
vault,
mtime,
cipher,
remoteEncryptedKey,
skipSaving
);
} else if (this.serviceType === "onedrive") {
return await onedrive.downloadFromRemote(
this.onedriveClient!,
fileOrFolderPath,
vault,
mtime,
cipher,
remoteEncryptedKey,
skipSaving
);
} else {
throw Error(`not supported service type ${this.serviceType}`);
}
};
deleteFromRemote = async (
fileOrFolderPath: string,
cipher: Cipher,
remoteEncryptedKey: string = "",
synthesizedFolder: boolean = false
) => {
if (this.serviceType === "s3") {
return await s3.deleteFromRemote(
s3.getS3Client(this.s3Config!),
this.s3Config!,
fileOrFolderPath,
cipher,
remoteEncryptedKey,
synthesizedFolder
);
} else if (this.serviceType === "webdav") {
return await webdav.deleteFromRemote(
this.webdavClient!,
fileOrFolderPath,
cipher,
remoteEncryptedKey
);
} else if (this.serviceType === "dropbox") {
return await dropbox.deleteFromRemote(
this.dropboxClient!,
fileOrFolderPath,
cipher,
remoteEncryptedKey
);
} else if (this.serviceType === "onedrive") {
return await onedrive.deleteFromRemote(
this.onedriveClient!,
fileOrFolderPath,
cipher,
remoteEncryptedKey
);
} else {
throw Error(`not supported service type ${this.serviceType}`);
}
};
checkConnectivity = async (callbackFunc?: any) => {
if (this.serviceType === "s3") {
return await s3.checkConnectivity(
s3.getS3Client(this.s3Config!),
this.s3Config!,
callbackFunc
);
} else if (this.serviceType === "webdav") {
return await webdav.checkConnectivity(this.webdavClient!, callbackFunc);
} else if (this.serviceType === "dropbox") {
return await dropbox.checkConnectivity(this.dropboxClient!, callbackFunc);
} else if (this.serviceType === "onedrive") {
return await onedrive.checkConnectivity(
this.onedriveClient!,
callbackFunc
);
} else {
throw Error(`not supported service type ${this.serviceType}`);
}
};
getUser = async () => {
if (this.serviceType === "dropbox") {
return await dropbox.getUserDisplayName(this.dropboxClient!);
} else if (this.serviceType === "onedrive") {
return await onedrive.getUserDisplayName(this.onedriveClient!);
} else {
throw Error(`not supported service type ${this.serviceType}`);
}
};
revokeAuth = async () => {
if (this.serviceType === "dropbox") {
return await dropbox.revokeAuth(this.dropboxClient!);
} else {
throw Error(`not supported service type ${this.serviceType}`);
}
};
}
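// This dispatch-per-service class is deleted in this commit; call sites now get
// a unified client from ./fsGetter instead (see main.ts above). A sketch,
// assuming settings / vaultName / saveSettings are in scope:
//   const client = getClient(settings, vaultName, async () => await saveSettings());
//   const username = await client.getUserDisplayName(); // replaces getUser()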

View File

@ -1,844 +0,0 @@
import type { _Object } from "@aws-sdk/client-s3";
import {
DeleteObjectCommand,
GetObjectCommand,
HeadBucketCommand,
HeadObjectCommand,
HeadObjectCommandOutput,
ListObjectsV2Command,
ListObjectsV2CommandInput,
PutObjectCommand,
S3Client,
} from "@aws-sdk/client-s3";
import { Upload } from "@aws-sdk/lib-storage";
import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http";
import {
FetchHttpHandler,
FetchHttpHandlerOptions,
} from "@smithy/fetch-http-handler";
// @ts-ignore
import { requestTimeout } from "@smithy/fetch-http-handler/dist-es/request-timeout";
import { buildQueryString } from "@smithy/querystring-builder";
import { HeaderBag, HttpHandlerOptions, Provider } from "@aws-sdk/types";
import { Buffer } from "buffer";
import * as mime from "mime-types";
import { Vault, requestUrl, RequestUrlParam, Platform } from "obsidian";
import { Readable } from "stream";
import * as path from "path";
import AggregateError from "aggregate-error";
import {
DEFAULT_CONTENT_TYPE,
Entity,
S3Config,
UploadedType,
VALID_REQURL,
} from "./baseTypes";
import {
arrayBufferToBuffer,
bufferToArrayBuffer,
mkdirpInVault,
} from "./misc";
export { S3Client } from "@aws-sdk/client-s3";
import PQueue from "p-queue";
import { Cipher } from "./encryptUnified";
////////////////////////////////////////////////////////////////////////////////
// special handler using Obsidian requestUrl
////////////////////////////////////////////////////////////////////////////////
/**
* This is close to origin implementation of FetchHttpHandler
* https://github.com/aws/aws-sdk-js-v3/blob/main/packages/fetch-http-handler/src/fetch-http-handler.ts
* that is released under Apache 2 License.
* But this uses Obsidian requestUrl instead.
*/
class ObsHttpHandler extends FetchHttpHandler {
requestTimeoutInMs: number | undefined;
reverseProxyNoSignUrl: string | undefined;
constructor(
options?: FetchHttpHandlerOptions,
reverseProxyNoSignUrl?: string
) {
super(options);
this.requestTimeoutInMs =
options === undefined ? undefined : options.requestTimeout;
this.reverseProxyNoSignUrl = reverseProxyNoSignUrl;
}
async handle(
request: HttpRequest,
{ abortSignal }: HttpHandlerOptions = {}
): Promise<{ response: HttpResponse }> {
if (abortSignal?.aborted) {
const abortError = new Error("Request aborted");
abortError.name = "AbortError";
return Promise.reject(abortError);
}
let path = request.path;
if (request.query) {
const queryString = buildQueryString(request.query);
if (queryString) {
path += `?${queryString}`;
}
}
const { port, method } = request;
let url = `${request.protocol}//${request.hostname}${
port ? `:${port}` : ""
}${path}`;
if (
this.reverseProxyNoSignUrl !== undefined &&
this.reverseProxyNoSignUrl !== ""
) {
const urlObj = new URL(url);
urlObj.host = this.reverseProxyNoSignUrl;
url = urlObj.href;
}
const body =
method === "GET" || method === "HEAD" ? undefined : request.body;
const transformedHeaders: Record<string, string> = {};
for (const key of Object.keys(request.headers)) {
const keyLower = key.toLowerCase();
if (keyLower === "host" || keyLower === "content-length") {
continue;
}
transformedHeaders[keyLower] = request.headers[key];
}
let contentType: string | undefined = undefined;
if (transformedHeaders["content-type"] !== undefined) {
contentType = transformedHeaders["content-type"];
}
let transformedBody: any = body;
if (ArrayBuffer.isView(body)) {
transformedBody = bufferToArrayBuffer(body);
}
const param: RequestUrlParam = {
body: transformedBody,
headers: transformedHeaders,
method: method,
url: url,
contentType: contentType,
};
const raceOfPromises = [
requestUrl(param).then((rsp) => {
const headers = rsp.headers;
const headersLower: Record<string, string> = {};
for (const key of Object.keys(headers)) {
headersLower[key.toLowerCase()] = headers[key];
}
const stream = new ReadableStream<Uint8Array>({
start(controller) {
controller.enqueue(new Uint8Array(rsp.arrayBuffer));
controller.close();
},
});
return {
response: new HttpResponse({
headers: headersLower,
statusCode: rsp.status,
body: stream,
}),
};
}),
requestTimeout(this.requestTimeoutInMs),
];
if (abortSignal) {
raceOfPromises.push(
new Promise<never>((resolve, reject) => {
abortSignal.onabort = () => {
const abortError = new Error("Request aborted");
abortError.name = "AbortError";
reject(abortError);
};
})
);
}
return Promise.race(raceOfPromises);
}
}
////////////////////////////////////////////////////////////////////////////////
// other stuffs
////////////////////////////////////////////////////////////////////////////////
export const DEFAULT_S3_CONFIG: S3Config = {
s3Endpoint: "",
s3Region: "",
s3AccessKeyID: "",
s3SecretAccessKey: "",
s3BucketName: "",
bypassCorsLocally: true,
partsConcurrency: 20,
forcePathStyle: false,
remotePrefix: "",
useAccurateMTime: false, // it causes money, disable by default
reverseProxyNoSignUrl: "",
};
export type S3ObjectType = _Object;
export const simpleTransRemotePrefix = (x: string) => {
if (x === undefined) {
return "";
}
let y = path.posix.normalize(x.trim());
if (y === undefined || y === "" || y === "/" || y === ".") {
return "";
}
if (y.startsWith("/")) {
y = y.slice(1);
}
if (!y.endsWith("/")) {
y = `${y}/`;
}
return y;
};
const getRemoteWithPrefixPath = (
fileOrFolderPath: string,
remotePrefix: string
) => {
let key = fileOrFolderPath;
if (fileOrFolderPath === "/" || fileOrFolderPath === "") {
// special
key = remotePrefix;
}
if (!fileOrFolderPath.startsWith("/")) {
key = `${remotePrefix}${fileOrFolderPath}`;
}
return key;
};
const getLocalNoPrefixPath = (
fileOrFolderPathWithRemotePrefix: string,
remotePrefix: string
) => {
if (
!(
fileOrFolderPathWithRemotePrefix === `${remotePrefix}` ||
fileOrFolderPathWithRemotePrefix.startsWith(`${remotePrefix}`)
)
) {
throw Error(
`"${fileOrFolderPathWithRemotePrefix}" doesn't starts with "${remotePrefix}"`
);
}
return fileOrFolderPathWithRemotePrefix.slice(`${remotePrefix}`.length);
};
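// A round-trip sketch of the three prefix helpers above:
const prefix = simpleTransRemotePrefix(" /myvault "); // "myvault/"
const remoteKey = getRemoteWithPrefixPath("notes/a.md", prefix); // "myvault/notes/a.md"
const localKey = getLocalNoPrefixPath(remoteKey, prefix); // "notes/a.md"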
const fromS3ObjectToEntity = (
x: S3ObjectType,
remotePrefix: string,
mtimeRecords: Record<string, number>,
ctimeRecords: Record<string, number>
) => {
// console.debug(`fromS3ObjectToEntity: ${x.Key!}, ${JSON.stringify(x,null,2)}`);
// S3 officially only supports seconds precision!!!!!
const mtimeSvr = Math.floor(x.LastModified!.valueOf() / 1000.0) * 1000;
let mtimeCli = mtimeSvr;
if (x.Key! in mtimeRecords) {
const m2 = mtimeRecords[x.Key!];
if (m2 !== 0) {
// to be compatible with RClone, we read and store the time in seconds in the new version!
if (m2 >= 1000000000000) {
// it's in milliseconds, uploaded by old versions of the code
mtimeCli = m2;
} else {
// it's in seconds, uploaded by plugin versions since March 24, 2024
mtimeCli = m2 * 1000;
}
}
}
const key = getLocalNoPrefixPath(x.Key!, remotePrefix);
const r: Entity = {
keyRaw: key,
mtimeSvr: mtimeSvr,
mtimeCli: mtimeCli,
sizeRaw: x.Size!,
etag: x.ETag,
synthesizedFolder: false,
};
return r;
};
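// The seconds-vs-milliseconds heuristic above, isolated as a sketch: stored
// values >= 1e12 are taken as epoch milliseconds (old uploads), smaller ones
// as rclone-compatible epoch seconds (uploads since March 24, 2024):
const normalizeStoredMTime = (m2: number): number =>
  m2 >= 1000000000000 ? m2 : m2 * 1000;
// normalizeStoredMTime(1711238400) === 1711238400000 (seconds, new)
// normalizeStoredMTime(1711238400123) === 1711238400123 (millis, old)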
const fromS3HeadObjectToEntity = (
fileOrFolderPathWithRemotePrefix: string,
x: HeadObjectCommandOutput,
remotePrefix: string
) => {
// console.debug(`fromS3HeadObjectToEntity: ${fileOrFolderPathWithRemotePrefix}: ${JSON.stringify(x,null,2)}`);
// S3 officially only supports seconds precision!!!!!
const mtimeSvr = Math.floor(x.LastModified!.valueOf() / 1000.0) * 1000;
let mtimeCli = mtimeSvr;
if (x.Metadata !== undefined) {
const m2 = Math.floor(
parseFloat(x.Metadata.mtime || x.Metadata.MTime || "0")
);
if (m2 !== 0) {
// to be compatible with RClone, we read and store the time in seconds in the new version!
if (m2 !== 0) {
// it's in milliseconds, uploaded by old versions of the code
mtimeCli = m2;
} else {
// it's in seconds, uploaded by plugin versions since March 24, 2024
mtimeCli = m2 * 1000;
}
}
}
// console.debug(
// `fromS3HeadObjectToEntity, fileOrFolderPathWithRemotePrefix=${fileOrFolderPathWithRemotePrefix}, remotePrefix=${remotePrefix}, x=${JSON.stringify(
// x
// )} `
// );
const key = getLocalNoPrefixPath(
fileOrFolderPathWithRemotePrefix,
remotePrefix
);
// console.debug(`fromS3HeadObjectToEntity, key=${key} after removing prefix`);
return {
keyRaw: key,
mtimeSvr: mtimeSvr,
mtimeCli: mtimeCli,
sizeRaw: x.ContentLength,
etag: x.ETag,
} as Entity;
};
export const getS3Client = (s3Config: S3Config) => {
let endpoint = s3Config.s3Endpoint;
if (!(endpoint.startsWith("http://") || endpoint.startsWith("https://"))) {
endpoint = `https://${endpoint}`;
}
let s3Client: S3Client;
if (VALID_REQURL && s3Config.bypassCorsLocally) {
s3Client = new S3Client({
region: s3Config.s3Region,
endpoint: endpoint,
forcePathStyle: s3Config.forcePathStyle,
credentials: {
accessKeyId: s3Config.s3AccessKeyID,
secretAccessKey: s3Config.s3SecretAccessKey,
},
requestHandler: new ObsHttpHandler(
undefined,
s3Config.reverseProxyNoSignUrl
),
});
} else {
s3Client = new S3Client({
region: s3Config.s3Region,
endpoint: endpoint,
forcePathStyle: s3Config.forcePathStyle,
credentials: {
accessKeyId: s3Config.s3AccessKeyID,
secretAccessKey: s3Config.s3SecretAccessKey,
},
});
}
s3Client.middlewareStack.add(
(next, context) => (args) => {
(args.request as any).headers["cache-control"] = "no-cache";
return next(args);
},
{
step: "build",
}
);
return s3Client;
};
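// A usage sketch, assuming an s3Config whose remotePrefix is "myvault/":
const s3Client = getS3Client(s3Config);
const entity = await getRemoteMeta(s3Client, s3Config, "myvault/notes/a.md");
console.debug(entity.keyRaw); // "notes/a.md" -- prefix stripped again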
export const getRemoteMeta = async (
s3Client: S3Client,
s3Config: S3Config,
fileOrFolderPathWithRemotePrefix: string
) => {
if (
s3Config.remotePrefix !== undefined &&
s3Config.remotePrefix !== "" &&
!fileOrFolderPathWithRemotePrefix.startsWith(s3Config.remotePrefix)
) {
throw Error(`s3 getRemoteMeta should only accept prefix-ed path`);
}
const res = await s3Client.send(
new HeadObjectCommand({
Bucket: s3Config.s3BucketName,
Key: fileOrFolderPathWithRemotePrefix,
})
);
return fromS3HeadObjectToEntity(
fileOrFolderPathWithRemotePrefix,
res,
s3Config.remotePrefix ?? ""
);
};
export const uploadToRemote = async (
s3Client: S3Client,
s3Config: S3Config,
fileOrFolderPath: string,
vault: Vault | undefined,
isRecursively: boolean,
cipher: Cipher,
remoteEncryptedKey: string = "",
uploadRaw: boolean = false,
rawContent: string | ArrayBuffer = "",
rawContentMTime: number = 0,
rawContentCTime: number = 0
): Promise<UploadedType> => {
console.debug(`uploading ${fileOrFolderPath}`);
let uploadFile = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
if (remoteEncryptedKey === undefined || remoteEncryptedKey === "") {
throw Error(
`uploadToRemote(s3) you have password but remoteEncryptedKey is empty!`
);
}
uploadFile = remoteEncryptedKey;
}
uploadFile = getRemoteWithPrefixPath(uploadFile, s3Config.remotePrefix ?? "");
// console.debug(`actual uploadFile=${uploadFile}`);
const isFolder = fileOrFolderPath.endsWith("/");
if (isFolder && isRecursively) {
throw Error("upload function doesn't implement recursive function yet!");
} else if (isFolder && !isRecursively) {
if (uploadRaw) {
throw Error(`you specified uploadRaw, but you also provided a folder key!`);
}
// folder
let mtime = 0;
let ctime = 0;
const s = await vault?.adapter?.stat(fileOrFolderPath);
if (s !== undefined && s !== null) {
mtime = s.mtime;
ctime = s.ctime;
}
const contentType = DEFAULT_CONTENT_TYPE;
await s3Client.send(
new PutObjectCommand({
Bucket: s3Config.s3BucketName,
Key: uploadFile,
Body: "",
ContentType: contentType,
Metadata: {
MTime: `${mtime / 1000.0}`,
CTime: `${ctime / 1000.0}`,
},
})
);
const res = await getRemoteMeta(s3Client, s3Config, uploadFile);
return {
entity: res,
mtimeCli: mtime,
};
} else {
// file
// we ignore isRecursively parameter here
let contentType = DEFAULT_CONTENT_TYPE;
if (cipher.isPasswordEmpty()) {
contentType =
mime.contentType(
mime.lookup(fileOrFolderPath) || DEFAULT_CONTENT_TYPE
) || DEFAULT_CONTENT_TYPE;
}
let localContent = undefined;
let mtime = 0;
let ctime = 0;
if (uploadRaw) {
if (typeof rawContent === "string") {
localContent = new TextEncoder().encode(rawContent).buffer;
} else {
localContent = rawContent;
}
mtime = rawContentMTime;
ctime = rawContentCTime;
} else {
if (vault === undefined) {
throw new Error(
`the vault variable is not passed but we want to read ${fileOrFolderPath} for S3`
);
}
localContent = await vault.adapter.readBinary(fileOrFolderPath);
const s = await vault.adapter.stat(fileOrFolderPath);
if (s !== undefined && s !== null) {
mtime = s.mtime;
ctime = s.ctime;
}
}
let remoteContent = localContent;
if (!cipher.isPasswordEmpty()) {
remoteContent = await cipher.encryptContent(localContent);
}
const bytesIn5MB = 5242880;
const body = new Uint8Array(remoteContent);
const upload = new Upload({
client: s3Client,
queueSize: s3Config.partsConcurrency, // concurrency
partSize: bytesIn5MB, // minimal 5MB by default
leavePartsOnError: false,
params: {
Bucket: s3Config.s3BucketName,
Key: uploadFile,
Body: body,
ContentType: contentType,
Metadata: {
MTime: `${mtime / 1000.0}`,
CTime: `${ctime / 1000.0}`,
},
},
});
upload.on("httpUploadProgress", (progress) => {
// console.info(progress);
});
await upload.done();
const res = await getRemoteMeta(s3Client, s3Config, uploadFile);
// console.debug(
// `uploaded ${uploadFile} with res=${JSON.stringify(res, null, 2)}`
// );
return {
entity: res,
mtimeCli: mtime,
};
}
};
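// With the 5 MiB minimum part size above, the part count is simply
// size / partSize, uploaded with queueSize-way concurrency. A quick sketch:
const partsFor = (sizeInBytes: number) => Math.ceil(sizeInBytes / 5242880);
// partsFor(50 * 1024 * 1024) === 10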
const listFromRemoteRaw = async (
s3Client: S3Client,
s3Config: S3Config,
prefixOfRawKeys?: string
) => {
const confCmd = {
Bucket: s3Config.s3BucketName,
} as ListObjectsV2CommandInput;
if (prefixOfRawKeys !== undefined && prefixOfRawKeys !== "") {
confCmd.Prefix = prefixOfRawKeys;
}
const contents = [] as _Object[];
const mtimeRecords: Record<string, number> = {};
const ctimeRecords: Record<string, number> = {};
const queueHead = new PQueue({
concurrency: s3Config.partsConcurrency,
autoStart: true,
});
queueHead.on("error", (error) => {
queueHead.pause();
queueHead.clear();
throw error;
});
let isTruncated = true;
do {
const rsp = await s3Client.send(new ListObjectsV2Command(confCmd));
if (rsp.$metadata.httpStatusCode !== 200) {
throw Error("some thing bad while listing remote!");
}
if (rsp.Contents === undefined) {
break;
}
contents.push(...rsp.Contents);
if (s3Config.useAccurateMTime) {
// head requests of all objects, love it
for (const content of rsp.Contents) {
queueHead.add(async () => {
const rspHead = await s3Client.send(
new HeadObjectCommand({
Bucket: s3Config.s3BucketName,
Key: content.Key,
})
);
if (rspHead.$metadata.httpStatusCode !== 200) {
throw Error("some thing bad while heading single object!");
}
if (rspHead.Metadata === undefined) {
// pass
} else {
mtimeRecords[content.Key!] = Math.floor(
parseFloat(
rspHead.Metadata.mtime || rspHead.Metadata.MTime || "0"
)
);
ctimeRecords[content.Key!] = Math.floor(
parseFloat(
rspHead.Metadata.ctime || rspHead.Metadata.CTime || "0"
)
);
}
});
}
}
isTruncated = rsp.IsTruncated ?? false;
confCmd.ContinuationToken = rsp.NextContinuationToken;
if (
isTruncated &&
(confCmd.ContinuationToken === undefined ||
confCmd.ContinuationToken === "")
) {
throw Error("isTruncated is true but no continuationToken provided");
}
} while (isTruncated);
// wait for any head requests
await queueHead.onIdle();
// ensemble fake rsp
// in the end, we need to transform the response list
// back to the local contents-alike list
return contents.map((x) =>
fromS3ObjectToEntity(
x,
s3Config.remotePrefix ?? "",
mtimeRecords,
ctimeRecords
)
);
};
export const listAllFromRemote = async (
s3Client: S3Client,
s3Config: S3Config
) => {
const res = (
await listFromRemoteRaw(s3Client, s3Config, s3Config.remotePrefix)
).filter((x) => x.keyRaw !== "" && x.keyRaw !== "/");
return res;
};
/**
* The Body of resp of aws GetObject has mix types
* and we want to get ArrayBuffer here.
* See https://github.com/aws/aws-sdk-js-v3/issues/1877
* @param b The Body of GetObject
* @returns Promise<ArrayBuffer>
*/
const getObjectBodyToArrayBuffer = async (
b: Readable | ReadableStream | Blob | undefined
) => {
if (b === undefined) {
throw Error(`ObjectBody is undefined and we don't know how to deal with it`);
}
if (b instanceof Readable) {
return (await new Promise((resolve, reject) => {
const chunks: Uint8Array[] = [];
b.on("data", (chunk) => chunks.push(chunk));
b.on("error", reject);
b.on("end", () => resolve(bufferToArrayBuffer(Buffer.concat(chunks))));
})) as ArrayBuffer;
} else if (b instanceof ReadableStream) {
return await new Response(b, {}).arrayBuffer();
} else if (b instanceof Blob) {
return await b.arrayBuffer();
} else {
throw TypeError(`The type of ${b} is not one of the supported types`);
}
};
const downloadFromRemoteRaw = async (
s3Client: S3Client,
s3Config: S3Config,
fileOrFolderPathWithRemotePrefix: string
) => {
if (
s3Config.remotePrefix !== undefined &&
s3Config.remotePrefix !== "" &&
!fileOrFolderPathWithRemotePrefix.startsWith(s3Config.remotePrefix)
) {
throw Error(`downloadFromRemoteRaw should only accept prefix-ed path`);
}
const data = await s3Client.send(
new GetObjectCommand({
Bucket: s3Config.s3BucketName,
Key: fileOrFolderPathWithRemotePrefix,
})
);
const bodyContents = await getObjectBodyToArrayBuffer(data.Body);
return bodyContents;
};
export const downloadFromRemote = async (
s3Client: S3Client,
s3Config: S3Config,
fileOrFolderPath: string,
vault: Vault,
mtime: number,
cipher: Cipher,
remoteEncryptedKey: string,
skipSaving: boolean = false
) => {
const isFolder = fileOrFolderPath.endsWith("/");
if (!skipSaving) {
await mkdirpInVault(fileOrFolderPath, vault);
}
// the file is always local file
// we need to encrypt it
if (isFolder) {
// mkdirp locally is enough
// do nothing here
return new ArrayBuffer(0);
} else {
let downloadFile = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
downloadFile = remoteEncryptedKey;
}
downloadFile = getRemoteWithPrefixPath(
downloadFile,
s3Config.remotePrefix ?? ""
);
const remoteContent = await downloadFromRemoteRaw(
s3Client,
s3Config,
downloadFile
);
let localContent = remoteContent;
if (!cipher.isPasswordEmpty()) {
localContent = await cipher.decryptContent(remoteContent);
}
if (!skipSaving) {
await vault.adapter.writeBinary(fileOrFolderPath, localContent, {
mtime: mtime,
});
}
return localContent;
}
};
/**
* This function deals with file normally and "folder" recursively.
* @param s3Client
* @param s3Config
* @param fileOrFolderPath
* @returns
*/
export const deleteFromRemote = async (
s3Client: S3Client,
s3Config: S3Config,
fileOrFolderPath: string,
cipher: Cipher,
remoteEncryptedKey: string = "",
synthesizedFolder: boolean = false
) => {
if (fileOrFolderPath === "/") {
return;
}
if (synthesizedFolder) {
return;
}
let remoteFileName = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
remoteFileName = remoteEncryptedKey;
}
remoteFileName = getRemoteWithPrefixPath(
remoteFileName,
s3Config.remotePrefix ?? ""
);
await s3Client.send(
new DeleteObjectCommand({
Bucket: s3Config.s3BucketName,
Key: remoteFileName,
})
);
if (fileOrFolderPath.endsWith("/") && cipher.isPasswordEmpty()) {
const x = await listFromRemoteRaw(s3Client, s3Config, remoteFileName);
x.forEach(async (element) => {
await s3Client.send(
new DeleteObjectCommand({
Bucket: s3Config.s3BucketName,
Key: getRemoteWithPrefixPath(element.keyRaw, s3Config.remotePrefix ?? ""),
})
);
});
} else if (fileOrFolderPath.endsWith("/") && !cipher.isPasswordEmpty()) {
// TODO
} else {
// pass
}
};
/**
* Check the S3 config by heading the bucket
* https://stackoverflow.com/questions/50842835
*
* Updated on 20240102:
* Users don't always have permission to head the bucket,
* so we need to list objects instead...
*
* @param s3Client
* @param s3Config
* @returns
*/
export const checkConnectivity = async (
s3Client: S3Client,
s3Config: S3Config,
callbackFunc?: any
) => {
try {
// TODO: no universal way now, just check this in connectivity
if (Platform.isIosApp && s3Config.s3Endpoint.startsWith("http://")) {
throw Error(
`Your s3 endpoint can only be https, not http, because of an iOS restriction.`
);
}
// const results = await s3Client.send(
// new HeadBucketCommand({ Bucket: s3Config.s3BucketName })
// );
// very simplified version of listing objects
const confCmd = {
Bucket: s3Config.s3BucketName,
} as ListObjectsV2CommandInput;
const results = await s3Client.send(new ListObjectsV2Command(confCmd));
if (
results === undefined ||
results.$metadata === undefined ||
results.$metadata.httpStatusCode === undefined
) {
const err = "results or $metadata or httStatusCode is undefined";
console.debug(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
}
return false;
}
return results.$metadata.httpStatusCode === 200;
} catch (err: any) {
console.debug(err);
if (callbackFunc !== undefined) {
if (s3Config.s3Endpoint.contains(s3Config.s3BucketName)) {
const err2 = new AggregateError([
err,
new Error(
"Maybe you've included the bucket name inside the endpoint setting. Please remove the bucket name and try again."
),
]);
callbackFunc(err2);
} else {
callbackFunc(err);
}
}
return false;
}
};

View File

@ -1,606 +0,0 @@
import { Buffer } from "buffer";
import { Platform, Vault, requestUrl } from "obsidian";
import { Queue } from "@fyears/tsqueue";
import chunk from "lodash/chunk";
import flatten from "lodash/flatten";
import cloneDeep from "lodash/cloneDeep";
import { getReasonPhrase } from "http-status-codes";
import { Entity, UploadedType, VALID_REQURL, WebdavConfig } from "./baseTypes";
import { bufferToArrayBuffer, getPathFolder, mkdirpInVault } from "./misc";
import { Cipher } from "./encryptUnified";
import type {
FileStat,
WebDAVClient,
RequestOptionsWithState,
// Response,
// ResponseDataDetailed,
} from "webdav";
/**
* https://stackoverflow.com/questions/32850898/how-to-check-if-a-string-has-any-non-iso-8859-1-characters-with-javascript
* @param str
* @returns true if all chars are ISO-8859-1
*/
function onlyAscii(str: string) {
return !/[^\u0000-\u00ff]/g.test(str);
}
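// A quick sketch of why the check matters when constructing Response headers:
console.assert(onlyAscii("attachment; filename=note.md") === true);
console.assert(onlyAscii("attachment; filename=笔记.md") === false); // gets URI-encoded below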
/**
* https://stackoverflow.com/questions/12539574/
* @param obj
* @returns
*/
function objKeyToLower(obj: Record<string, string>) {
return Object.fromEntries(
Object.entries(obj).map(([k, v]) => [k.toLowerCase(), v])
);
}
// @ts-ignore
import { getPatcher } from "webdav/dist/web/index.js";
if (VALID_REQURL) {
getPatcher().patch(
"request",
async (options: RequestOptionsWithState): Promise<Response> => {
const transformedHeaders = objKeyToLower({ ...options.headers });
delete transformedHeaders["host"];
delete transformedHeaders["content-length"];
const reqContentType =
transformedHeaders["accept"] ?? transformedHeaders["content-type"];
const retractedHeaders = { ...transformedHeaders };
if (retractedHeaders.hasOwnProperty("authorization")) {
retractedHeaders["authorization"] = "<retracted>";
}
console.debug(`before request:`);
console.debug(`url: ${options.url}`);
console.debug(`method: ${options.method}`);
console.debug(`headers: ${JSON.stringify(retractedHeaders, null, 2)}`);
console.debug(`reqContentType: ${reqContentType}`);
let r = await requestUrl({
url: options.url,
method: options.method,
body: options.data as string | ArrayBuffer,
headers: transformedHeaders,
contentType: reqContentType,
throw: false,
});
if (
r.status === 401 &&
Platform.isIosApp &&
!options.url.endsWith("/") &&
!options.url.endsWith(".md") &&
options.method.toUpperCase() === "PROPFIND"
) {
// don't ask me why,
// some webdav servers have some mysterious behaviours,
// if a folder path without a trailing slash doesn't exist, the server returns 401 instead of 404
// here is a dirty hack that works
console.debug(`so we got 401, retrying with a trailing slash appended to the request url`);
r = await requestUrl({
url: `${options.url}/`,
method: options.method,
body: options.data as string | ArrayBuffer,
headers: transformedHeaders,
contentType: reqContentType,
throw: false,
});
}
console.debug(`after request:`);
const rspHeaders = objKeyToLower({ ...r.headers });
console.debug(`rspHeaders: ${JSON.stringify(rspHeaders, null, 2)}`);
for (let key in rspHeaders) {
if (rspHeaders.hasOwnProperty(key)) {
// avoid the error:
// Failed to read the 'headers' property from 'ResponseInit': String contains non ISO-8859-1 code point.
// const possibleNonAscii = [
// "Content-Disposition",
// "X-Accel-Redirect",
// "X-Outfilename",
// "X-Sendfile"
// ];
// for (const p of possibleNonAscii) {
// if (key === p || key === p.toLowerCase()) {
// rspHeaders[key] = encodeURIComponent(rspHeaders[key]);
// }
// }
if (!onlyAscii(rspHeaders[key])) {
console.debug(`rspHeaders[key] needs encode: ${key}`);
rspHeaders[key] = encodeURIComponent(rspHeaders[key]);
}
}
}
let r2: Response | undefined = undefined;
const statusText = getReasonPhrase(r.status);
console.debug(`statusText: ${statusText}`);
if ([101, 103, 204, 205, 304].includes(r.status)) {
// A null body status is a status that is 101, 103, 204, 205, or 304.
// https://fetch.spec.whatwg.org/#statuses
// fix this: Failed to construct 'Response': Response with null body status cannot have body
r2 = new Response(null, {
status: r.status,
statusText: statusText,
headers: rspHeaders,
});
} else {
r2 = new Response(r.arrayBuffer, {
status: r.status,
statusText: statusText,
headers: rspHeaders,
});
}
return r2;
}
);
}
// @ts-ignore
import { AuthType, BufferLike, createClient } from "webdav/dist/web/index.js";
export type { WebDAVClient } from "webdav";
export const DEFAULT_WEBDAV_CONFIG = {
address: "",
username: "",
password: "",
authType: "basic",
manualRecursive: true,
depth: "manual_1",
remoteBaseDir: "",
} as WebdavConfig;
const getWebdavPath = (fileOrFolderPath: string, remoteBaseDir: string) => {
let key = fileOrFolderPath;
if (fileOrFolderPath === "/" || fileOrFolderPath === "") {
// special
key = `/${remoteBaseDir}/`;
} else if (fileOrFolderPath.startsWith("/")) {
console.warn(
`why does the path ${fileOrFolderPath} start with '/'? we just go on anyway.`
);
key = `/${remoteBaseDir}${fileOrFolderPath}`;
} else {
key = `/${remoteBaseDir}/${fileOrFolderPath}`;
}
return key;
};
const getNormPath = (fileOrFolderPath: string, remoteBaseDir: string) => {
if (
!(
fileOrFolderPath === `/${remoteBaseDir}` ||
fileOrFolderPath.startsWith(`/${remoteBaseDir}/`)
)
) {
throw Error(
`"${fileOrFolderPath}" doesn't starts with "/${remoteBaseDir}/"`
);
}
return fileOrFolderPath.slice(`/${remoteBaseDir}/`.length);
};
const fromWebdavItemToEntity = (x: FileStat, remoteBaseDir: string) => {
let key = getNormPath(x.filename, remoteBaseDir);
if (x.type === "directory" && !key.endsWith("/")) {
key = `${key}/`;
}
const mtimeSvr = Date.parse(x.lastmod).valueOf();
return {
keyRaw: key,
mtimeSvr: mtimeSvr,
mtimeCli: mtimeSvr, // no universal way to set mtime in webdav
sizeRaw: x.size,
etag: x.etag,
} as Entity;
};
export class WrappedWebdavClient {
webdavConfig: WebdavConfig;
remoteBaseDir: string;
client!: WebDAVClient;
vaultFolderExists: boolean;
saveUpdatedConfigFunc: () => Promise<any>;
constructor(
webdavConfig: WebdavConfig,
remoteBaseDir: string,
saveUpdatedConfigFunc: () => Promise<any>
) {
this.webdavConfig = cloneDeep(webdavConfig);
this.webdavConfig.address = encodeURI(this.webdavConfig.address);
this.remoteBaseDir = remoteBaseDir;
this.vaultFolderExists = false;
this.saveUpdatedConfigFunc = saveUpdatedConfigFunc;
}
init = async () => {
// init the client if not initialized yet
if (this.client !== undefined) {
return;
}
if (Platform.isIosApp && !this.webdavConfig.address.startsWith("https")) {
throw Error(
`Your webdav address can only be https, not http, because of an iOS restriction.`
);
}
const headers = {
"Cache-Control": "no-cache",
};
if (
this.webdavConfig.username !== "" &&
this.webdavConfig.password !== ""
) {
this.client = createClient(this.webdavConfig.address, {
username: this.webdavConfig.username,
password: this.webdavConfig.password,
headers: headers,
authType:
this.webdavConfig.authType === "digest"
? AuthType.Digest
: AuthType.Password,
});
} else {
console.info("no password");
this.client = createClient(this.webdavConfig.address, {
headers: headers,
});
}
// check vault folder
if (this.vaultFolderExists) {
// pass
} else {
const res = await this.client.exists(`/${this.remoteBaseDir}/`);
if (res) {
// console.info("remote vault folder exits!");
this.vaultFolderExists = true;
} else {
console.info("remote vault folder not exists, creating");
await this.client.createDirectory(`/${this.remoteBaseDir}/`);
console.info("remote vault folder created!");
this.vaultFolderExists = true;
}
}
// adjust depth parameter
if (
this.webdavConfig.depth === "auto" ||
this.webdavConfig.depth === "auto_1" ||
this.webdavConfig.depth === "auto_infinity" ||
this.webdavConfig.depth === "auto_unknown"
) {
this.webdavConfig.depth = "manual_1";
this.webdavConfig.manualRecursive = true;
if (this.saveUpdatedConfigFunc !== undefined) {
await this.saveUpdatedConfigFunc();
console.info(
`webdav depth="auto_???" is changed to ${this.webdavConfig.depth}`
);
}
}
};
}
export const getWebdavClient = (
webdavConfig: WebdavConfig,
remoteBaseDir: string,
saveUpdatedConfigFunc: () => Promise<any>
) => {
return new WrappedWebdavClient(
webdavConfig,
remoteBaseDir,
saveUpdatedConfigFunc
);
};
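// The wrapped client is lazy: each helper below calls init() first, which is a
// no-op once this.client exists. A usage sketch, assuming a webdavConfig:
const wd = getWebdavClient(webdavConfig, "myvault", async () => {});
await wd.init(); // creates the client and ensures /myvault/ exists remotely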
/**
*
* @param client
* @param remotePath It should be prefix-ed already
* @returns
*/
export const getRemoteMeta = async (
client: WrappedWebdavClient,
remotePath: string
) => {
await client.init();
console.debug(`getRemoteMeta remotePath = ${remotePath}`);
const res = (await client.client.stat(remotePath, {
details: false,
})) as FileStat;
console.debug(`getRemoteMeta res=${JSON.stringify(res)}`);
return fromWebdavItemToEntity(res, client.remoteBaseDir);
};
export const uploadToRemote = async (
client: WrappedWebdavClient,
fileOrFolderPath: string,
vault: Vault | undefined,
isRecursively: boolean,
cipher: Cipher,
remoteEncryptedKey: string = "",
uploadRaw: boolean = false,
rawContent: string | ArrayBuffer = ""
): Promise<UploadedType> => {
await client.init();
let uploadFile = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
if (remoteEncryptedKey === undefined || remoteEncryptedKey === "") {
throw Error(
`uploadToRemote(webdav) you have password but remoteEncryptedKey is empty!`
);
}
uploadFile = remoteEncryptedKey;
}
uploadFile = getWebdavPath(uploadFile, client.remoteBaseDir);
const isFolder = fileOrFolderPath.endsWith("/");
if (isFolder && isRecursively) {
throw Error("upload function doesn't implement recursive function yet!");
} else if (isFolder && !isRecursively) {
if (uploadRaw) {
throw Error(`you specified uploadRaw, but you also provided a folder key!`);
}
// folder
if (cipher.isPasswordEmpty() || cipher.isFolderAware()) {
// if not encrypted, or encrypted and folder-aware, mkdir a remote folder
await client.client.createDirectory(uploadFile, {
recursive: true,
});
const res = await getRemoteMeta(client, uploadFile);
return {
entity: res,
};
} else {
// if encrypted && !isFolderAware(),
// upload a fake file with the encrypted file name
await client.client.putFileContents(uploadFile, "", {
overwrite: true,
onUploadProgress: (progress: any) => {
// console.info(`Uploaded ${progress.loaded} bytes of ${progress.total}`);
},
});
return {
entity: await getRemoteMeta(client, uploadFile),
};
}
} else {
// file
// we ignore isRecursively parameter here
let localContent: ArrayBuffer | undefined = undefined;
let mtimeCli: number | undefined = undefined;
if (uploadRaw) {
if (typeof rawContent === "string") {
localContent = new TextEncoder().encode(rawContent).buffer;
} else {
localContent = rawContent;
}
} else {
if (vault == undefined) {
throw new Error(
`the vault variable is not passed but we want to read ${fileOrFolderPath} for webdav`
);
}
localContent = await vault.adapter.readBinary(fileOrFolderPath);
mtimeCli = (await vault.adapter.stat(fileOrFolderPath))?.mtime;
}
let remoteContent = localContent;
if (!cipher.isPasswordEmpty()) {
remoteContent = await cipher.encryptContent(localContent);
}
// updated 20220326: the algorithm guarantees this
// // we need to create folders before uploading
// const dir = getPathFolder(uploadFile);
// if (dir !== "/" && dir !== "") {
// await client.client.createDirectory(dir, { recursive: true });
// }
await client.client.putFileContents(uploadFile, remoteContent, {
overwrite: true,
onUploadProgress: (progress: any) => {
console.info(`Uploaded ${progress.loaded} bytes of ${progress.total}`);
},
});
return {
entity: await getRemoteMeta(client, uploadFile),
mtimeCli: mtimeCli,
};
}
};
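A hedged usage sketch of the function above; `client`, `vault`, and `cipher` are assumed to already exist, and the encrypted key mirrors how the sync layer pre-computes it with cipher.encryptName:
// Sketch: upload one note. With an empty password the plain path is used
// directly; otherwise the pre-encrypted remote key must be supplied.
const key = "notes/a.md";
const uploaded = await uploadToRemote(
  client,
  key,
  vault,
  false /* isRecursively */,
  cipher,
  cipher.isPasswordEmpty() ? "" : await cipher.encryptName(key)
);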
export const listAllFromRemote = async (client: WrappedWebdavClient) => {
await client.init();
let contents = [] as FileStat[];
if (
client.webdavConfig.depth === "auto" ||
client.webdavConfig.depth === "auto_unknown" ||
client.webdavConfig.depth === "auto_1" ||
client.webdavConfig.depth === "auto_infinity" /* don't trust auto now */ ||
client.webdavConfig.depth === "manual_1"
) {
// the remote doesn't support infinite-depth propfind,
// so we need to do a BFS here
const q = new Queue([`/${client.remoteBaseDir}`]);
const CHUNK_SIZE = 10;
while (q.length > 0) {
const itemsToFetch: string[] = [];
while (q.length > 0) {
itemsToFetch.push(q.pop()!);
}
const itemsToFetchChunks = chunk(itemsToFetch, CHUNK_SIZE);
// console.debug(itemsToFetchChunks);
const subContents = [] as FileStat[];
for (const singleChunk of itemsToFetchChunks) {
const r = singleChunk.map((x) => {
return client.client.getDirectoryContents(x, {
deep: false,
details: false /* no need for verbose details here */,
// TODO: to support .obsidian,
// we need to load all files including dot files;
// any way to reduce the resource usage?
// glob: "/**" /* avoid dot files by using glob */,
}) as Promise<FileStat[]>;
});
const r2 = flatten(await Promise.all(r));
subContents.push(...r2);
}
for (let i = 0; i < subContents.length; ++i) {
const f = subContents[i];
contents.push(f);
if (f.type === "directory") {
q.push(f.filename);
}
}
}
} else {
// the remote supports infinity propfind
contents = (await client.client.getDirectoryContents(
`/${client.remoteBaseDir}`,
{
deep: true,
details: false /* no need for verbose details here */,
// TODO: to support .obsidian,
// we need to load all files including dot files;
// any way to reduce the resource usage?
// glob: "/**" /* avoid dot files by using glob */,
}
)) as FileStat[];
}
return contents.map((x) => fromWebdavItemToEntity(x, client.remoteBaseDir));
};
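The manual-depth branch above batches depth-1 PROPFIND requests in chunks of 10 to bound concurrency; both branches return the same normalized entities. A tiny sketch of consuming the result:
// Sketch: list everything under the vault folder and log the raw keys.
const remoteEntities = await listAllFromRemote(client);
console.debug(remoteEntities.map((x) => x.keyRaw));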
const downloadFromRemoteRaw = async (
client: WrappedWebdavClient,
remotePath: string
) => {
await client.init();
// console.info(`getWebdavPath=${remotePath}`);
const buff = (await client.client.getFileContents(remotePath)) as BufferLike;
if (buff instanceof ArrayBuffer) {
return buff;
} else if (buff instanceof Buffer) {
return bufferToArrayBuffer(buff);
}
throw Error(`unexpected file content result with type ${typeof buff}`);
};
export const downloadFromRemote = async (
client: WrappedWebdavClient,
fileOrFolderPath: string,
vault: Vault,
mtime: number,
cipher: Cipher,
remoteEncryptedKey: string = "",
skipSaving: boolean = false
) => {
await client.init();
const isFolder = fileOrFolderPath.endsWith("/");
if (!skipSaving) {
await mkdirpInVault(fileOrFolderPath, vault);
}
// fileOrFolderPath always refers to the local (plain) path;
// the remote content is decrypted below if needed
if (isFolder) {
// mkdirp locally is enough
// do nothing here
return new ArrayBuffer(0);
} else {
let downloadFile = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
downloadFile = remoteEncryptedKey;
}
downloadFile = getWebdavPath(downloadFile, client.remoteBaseDir);
// console.info(`downloadFile=${downloadFile}`);
const remoteContent = await downloadFromRemoteRaw(client, downloadFile);
let localContent = remoteContent;
if (!cipher.isPasswordEmpty()) {
localContent = await cipher.decryptContent(remoteContent);
}
if (!skipSaving) {
await vault.adapter.writeBinary(fileOrFolderPath, localContent, {
mtime: mtime,
});
}
return localContent;
}
};
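The mirror image of the upload sketch; here `remoteEntity` is an assumed element of a previous listAllFromRemote result, supplying the mtime and the encrypted key:
// Sketch: pull one note back into the vault.
const content = await downloadFromRemote(
  client,
  "notes/a.md",
  vault,
  remoteEntity.mtimeCli!,
  cipher,
  cipher.isPasswordEmpty() ? "" : remoteEntity.keyEnc!
);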
export const deleteFromRemote = async (
client: WrappedWebdavClient,
fileOrFolderPath: string,
cipher: Cipher,
remoteEncryptedKey: string = ""
) => {
if (fileOrFolderPath === "/") {
return;
}
let remoteFileName = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
remoteFileName = remoteEncryptedKey;
}
remoteFileName = getWebdavPath(remoteFileName, client.remoteBaseDir);
await client.init();
try {
await client.client.deleteFile(remoteFileName);
// console.info(`delete ${remoteFileName} succeeded`);
} catch (err) {
console.error("some error while deleting");
console.error(err);
}
};
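Deletion follows the same key-translation rule; a sketch under the same assumptions as above:
// Sketch: delete one note; with a password set, the encrypted name is used.
await deleteFromRemote(
  client,
  "notes/a.md",
  cipher,
  cipher.isPasswordEmpty() ? "" : await cipher.encryptName("notes/a.md")
);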
export const checkConnectivity = async (
client: WrappedWebdavClient,
callbackFunc?: any
) => {
if (
!(
client.webdavConfig.address.startsWith("http://") ||
client.webdavConfig.address.startsWith("https://")
)
) {
const err = "Error: the url should start with http(s):// but it does not!";
console.error(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
}
return false;
}
try {
await client.init();
const results = await getRemoteMeta(client, `/${client.remoteBaseDir}/`);
if (results === undefined) {
const err = "results is undefined";
console.error(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
}
return false;
}
return true;
} catch (err) {
console.error(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
}
return false;
}
};
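A sketch of how a caller can surface the failure reason, mirroring the settings-tab usage later in this commit:
const errors = { msg: "" };
const ok = await checkConnectivity(client, (err: any) => {
  errors.msg = `${err}`;
});
if (!ok) {
  console.error(`webdav connectivity check failed: ${errors.msg}`);
}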

View File

@ -41,17 +41,17 @@ import {
upsertLastSuccessSyncTimeByVault,
} from "./localdb";
import type RemotelySavePlugin from "./main"; // unavoidable
import { RemoteClient } from "./remote";
import { FakeFs } from "./fsAll";
import {
DEFAULT_DROPBOX_CONFIG,
getAuthUrlAndVerifier as getAuthUrlAndVerifierDropbox,
sendAuthReq as sendAuthReqDropbox,
setConfigBySuccessfullAuthInplace,
} from "./remoteForDropbox";
} from "./fsDropbox";
import {
DEFAULT_ONEDRIVE_CONFIG,
getAuthUrlAndVerifier as getAuthUrlAndVerifierOnedrive,
} from "./remoteForOnedrive";
} from "./fsOnedrive";
import { messyConfigToNormal } from "./configPersist";
import type { TransItemType } from "./i18n";
import {
@ -59,8 +59,9 @@ import {
checkHasSpecialCharForDir,
stringToFragment,
} from "./misc";
import { simpleTransRemotePrefix } from "./remoteForS3";
import { simpleTransRemotePrefix } from "./fsS3";
import cloneDeep from "lodash/cloneDeep";
import { getClient } from "./fsGetter";
class PasswordModal extends Modal {
plugin: RemotelySavePlugin;
@ -468,16 +469,12 @@ class DropboxAuthModal extends Modal {
authRes!,
() => self.plugin.saveSettings()
);
const client = new RemoteClient(
"dropbox",
undefined,
undefined,
this.plugin.settings.dropbox,
undefined,
const client = getClient(
this.plugin.settings,
this.app.vault.getName(),
() => self.plugin.saveSettings()
() => this.plugin.saveSettings()
);
const username = await client.getUser();
const username = await client.getUserDisplayName();
this.plugin.settings.dropbox.username = username;
await this.plugin.saveSettings();
new Notice(
@ -1077,9 +1074,13 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
button.setButtonText(t("settings_checkonnectivity_button"));
button.onClick(async () => {
new Notice(t("settings_checkonnectivity_checking"));
const client = new RemoteClient("s3", this.plugin.settings.s3);
const client = getClient(
this.plugin.settings,
this.app.vault.getName(),
() => this.plugin.saveSettings()
);
const errors = { msg: "" };
const res = await client.checkConnectivity((err: any) => {
const res = await client.checkConnect((err: any) => {
errors.msg = err;
});
if (res) {
@ -1143,14 +1144,10 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
button.onClick(async () => {
try {
const self = this;
const client = new RemoteClient(
"dropbox",
undefined,
undefined,
this.plugin.settings.dropbox,
undefined,
const client = getClient(
this.plugin.settings,
this.app.vault.getName(),
() => self.plugin.saveSettings()
() => this.plugin.saveSettings()
);
await client.revokeAuth();
this.plugin.settings.dropbox = JSON.parse(
@ -1258,18 +1255,14 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
button.onClick(async () => {
new Notice(t("settings_checkonnectivity_checking"));
const self = this;
const client = new RemoteClient(
"dropbox",
undefined,
undefined,
this.plugin.settings.dropbox,
undefined,
const client = getClient(
this.plugin.settings,
this.app.vault.getName(),
() => self.plugin.saveSettings()
() => this.plugin.saveSettings()
);
const errors = { msg: "" };
const res = await client.checkConnectivity((err: any) => {
const res = await client.checkConnect((err: any) => {
errors.msg = `${err}`;
});
if (res) {
@ -1407,18 +1400,13 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
button.onClick(async () => {
new Notice(t("settings_checkonnectivity_checking"));
const self = this;
const client = new RemoteClient(
"onedrive",
undefined,
undefined,
undefined,
this.plugin.settings.onedrive,
const client = getClient(
this.plugin.settings,
this.app.vault.getName(),
() => self.plugin.saveSettings()
() => this.plugin.saveSettings()
);
const errors = { msg: "" };
const res = await client.checkConnectivity((err: any) => {
const res = await client.checkConnect((err: any) => {
errors.msg = `${err}`;
});
if (res) {
@ -1617,17 +1605,13 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
button.onClick(async () => {
new Notice(t("settings_checkonnectivity_checking"));
const self = this;
const client = new RemoteClient(
"webdav",
undefined,
this.plugin.settings.webdav,
undefined,
undefined,
const client = getClient(
this.plugin.settings,
this.app.vault.getName(),
() => self.plugin.saveSettings()
() => this.plugin.saveSettings()
);
const errors = { msg: "" };
const res = await client.checkConnectivity((err: any) => {
const res = await client.checkConnect((err: any) => {
errors.msg = `${err}`;
});
if (res) {
@ -1790,72 +1774,22 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
});
new Setting(basicDiv)
.setName(t("settings_saverun"))
.setDesc(t("settings_saverun_desc"))
.setName(t("settings_synconsave"))
.setDesc(t("settings_synconsave_desc"))
.addDropdown((dropdown) => {
dropdown.addOption("-1", t("settings_saverun_notset"));
dropdown.addOption(`${1000 * 1}`, t("settings_saverun_1sec"));
dropdown.addOption(`${1000 * 5}`, t("settings_saverun_5sec"));
dropdown.addOption(`${1000 * 10}`, t("settings_saverun_10sec"));
dropdown.addOption(`${1000 * 60}`, t("settings_saverun_1min"));
let runScheduled = false;
dropdown.addOption("-1", t("settings_synconsave_disable"));
dropdown.addOption("1000", t("settings_synconsave_enable"));
// for backward compatibility, we need to use a number representing milliseconds
let syncOnSaveEnabled = false;
if ((this.plugin.settings.syncOnSaveAfterMilliseconds ?? -1) > 0) {
syncOnSaveEnabled = true;
}
dropdown
.setValue(`${this.plugin.settings.syncOnSaveAfterMilliseconds}`)
.setValue(`${syncOnSaveEnabled ? "1000" : "-1"}`)
.onChange(async (val: string) => {
const realVal = parseInt(val);
this.plugin.settings.syncOnSaveAfterMilliseconds = realVal;
this.plugin.settings.syncOnSaveAfterMilliseconds = parseInt(val);
await this.plugin.saveSettings();
if (
(realVal === undefined || realVal === null || realVal <= 0) &&
this.plugin.syncOnSaveIntervalID !== undefined
) {
// clear
window.clearInterval(this.plugin.syncOnSaveIntervalID);
this.plugin.syncOnSaveIntervalID = undefined;
} else if (
realVal !== undefined &&
realVal !== null &&
realVal > 0
) {
const intervalID = window.setInterval(() => {
const currentFile = this.app.workspace.getActiveFile();
if (currentFile) {
// get the last modified time of the current file
// if it has been modified within the last syncOnSaveAfterMilliseconds
// then schedule a run for syncOnSaveAfterMilliseconds after it was modified
const lastModified = currentFile.stat.mtime;
const currentTime = Date.now();
// console.debug(
// `Checking if file was modified within last ${
// this.plugin.settings.syncOnSaveAfterMilliseconds / 1000
// } seconds, last modified: ${
// (currentTime - lastModified) / 1000
// } seconds ago`
// );
if (
currentTime - lastModified <
this.plugin.settings.syncOnSaveAfterMilliseconds!
) {
if (!runScheduled) {
const scheduleTimeFromNow =
this.plugin.settings.syncOnSaveAfterMilliseconds! -
(currentTime - lastModified);
console.info(
`schedule a run for ${scheduleTimeFromNow} milliseconds later`
);
runScheduled = true;
setTimeout(() => {
this.plugin.syncRun("auto_sync_on_save");
runScheduled = false;
}, scheduleTimeFromNow);
}
}
}
}, realVal);
this.plugin.syncOnSaveIntervalID = intervalID;
this.plugin.registerInterval(intervalID);
}
this.plugin.toggleSyncOnSaveIfSet();
});
});

View File

@ -1,157 +1,38 @@
import PQueue from "p-queue";
import XRegExp from "xregexp";
import type {
CipherMethodType,
import {
ConflictActionType,
EmptyFolderCleanType,
Entity,
MixedEntity,
RemotelySavePluginSettings,
SUPPORTED_SERVICES_TYPE,
SyncDirectionType,
SyncTriggerSourceType,
} from "./baseTypes";
import { isInsideObsFolder } from "./obsFolderLister";
import { FakeFs } from "./fsAll";
import { FakeFsEncrypt } from "./fsEncrypt";
import {
InternalDBs,
clearPrevSyncRecordByVaultAndProfile,
getAllPrevSyncRecordsByVaultAndProfile,
insertSyncPlanRecordByVault,
upsertPrevSyncRecordByVaultAndProfile,
} from "./localdb";
import {
isSpecialFolderNameToSkip,
isHiddenPath,
unixTimeToStr,
getParentFolder,
isVaildText,
atWhichLevel,
mkdirpInVault,
getFolderLevels,
getParentFolder,
isHiddenPath,
isSpecialFolderNameToSkip,
unixTimeToStr,
} from "./misc";
import { Profiler } from "./profiler";
import {
DEFAULT_FILE_NAME_FOR_METADATAONREMOTE,
DEFAULT_FILE_NAME_FOR_METADATAONREMOTE2,
} from "./metadataOnRemote";
import { RemoteClient } from "./remote";
import { Vault } from "obsidian";
import AggregateError from "aggregate-error";
import {
InternalDBs,
clearPrevSyncRecordByVaultAndProfile,
upsertPrevSyncRecordByVaultAndProfile,
} from "./localdb";
import { Cipher } from "./encryptUnified";
import { Profiler } from "./profiler";
export type SyncStatusType =
| "idle"
| "preparing"
| "getting_remote_files_list"
| "getting_local_meta"
| "getting_local_prev_sync"
| "checking_password"
| "generating_plan"
| "syncing"
| "cleaning"
| "finish";
export interface PasswordCheckType {
ok: boolean;
reason:
| "empty_remote"
| "unknown_encryption_method"
| "remote_encrypted_local_no_password"
| "password_matched"
| "password_or_method_not_matched_or_remote_not_encrypted"
| "likely_no_password_both_sides"
| "encryption_method_not_matched";
}
export const isPasswordOk = async (
remote: Entity[],
cipher: Cipher
): Promise<PasswordCheckType> => {
if (remote === undefined || remote.length === 0) {
// remote empty
return {
ok: true,
reason: "empty_remote",
};
}
const santyCheckKey = remote[0].keyRaw;
if (cipher.isPasswordEmpty()) {
// TODO: no way to distinguish a remote encrypted by rclone
// if local has no password??
if (Cipher.isLikelyEncryptedName(santyCheckKey)) {
return {
ok: false,
reason: "remote_encrypted_local_no_password",
};
} else {
return {
ok: true,
reason: "likely_no_password_both_sides",
};
}
} else {
if (cipher.method === "unknown") {
return {
ok: false,
reason: "unknown_encryption_method",
};
}
if (
Cipher.isLikelyEncryptedNameNotMatchMethod(santyCheckKey, cipher.method)
) {
return {
ok: false,
reason: "encryption_method_not_matched",
};
}
try {
const k = await cipher.decryptName(santyCheckKey);
if (k === undefined) {
throw Error(`decryption failed`);
}
return {
ok: true,
reason: "password_matched",
};
} catch (error) {
return {
ok: false,
reason: "password_or_method_not_matched_or_remote_not_encrypted",
};
}
}
};
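In the pre-rewrite API shown above, a caller would gate the sync on this result; a sketch, assuming `remoteEntities` and `cipher` already exist:
const check = await isPasswordOk(remoteEntities, cipher);
if (!check.ok) {
  // e.g. "remote_encrypted_local_no_password"
  throw Error(`password check failed: ${check.reason}`);
}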
const isSkipItemByName = (
key: string,
syncConfigDir: boolean,
syncUnderscoreItems: boolean,
configDir: string,
ignorePaths: string[]
) => {
if (key === undefined) {
throw Error(`isSkipItemByName received an undefined key!`);
}
if (ignorePaths !== undefined && ignorePaths.length > 0) {
for (const r of ignorePaths) {
if (XRegExp(r, "A").test(key)) {
return true;
}
}
}
if (syncConfigDir && isInsideObsFolder(key, configDir)) {
return false;
}
if (isSpecialFolderNameToSkip(key, [])) {
// some special dirs and files are always skipped
return true;
}
return (
isHiddenPath(key, true, false) ||
(!syncUnderscoreItems && isHiddenPath(key, false, true)) ||
key === "/" ||
key === DEFAULT_FILE_NAME_FOR_METADATAONREMOTE ||
key === DEFAULT_FILE_NAME_FOR_METADATAONREMOTE2
);
};
import PQueue from "p-queue";
const copyEntityAndFixTimeFormat = (
src: Entity,
@ -195,52 +76,6 @@ const copyEntityAndFixTimeFormat = (
return result;
};
/**
 * In place; no extra copy is made.
*/
const decryptRemoteEntityInplace = async (remote: Entity, cipher: Cipher) => {
if (cipher?.isPasswordEmpty()) {
remote.key = remote.keyRaw;
remote.keyEnc = remote.keyRaw;
remote.size = remote.sizeRaw;
remote.sizeEnc = remote.sizeRaw;
return remote;
}
remote.keyEnc = remote.keyRaw;
remote.key = await cipher.decryptName(remote.keyEnc);
remote.sizeEnc = remote.sizeRaw;
// TODO
// remote.size = getSizeFromEncToOrig(remote.sizeEnc, password);
// but we don't have deterministic way to get a number because the encryption has padding...
return remote;
};
const fullfillMTimeOfRemoteEntityInplace = (
remote: Entity,
mtimeCli?: number
) => {
// TODO:
// on 20240405, we find that dropbox's mtimeCli is not updated
// if the content is not updated even the time is updated...
// so we do not check remote.mtimeCli for now..
if (
mtimeCli !== undefined &&
mtimeCli > 0 /* &&
(remote.mtimeCli === undefined ||
remote.mtimeCli <= 0 ||
(remote.mtimeSvr !== undefined &&
remote.mtimeSvr > 0 &&
remote.mtimeCli >= remote.mtimeSvr))
*/
) {
remote.mtimeCli = mtimeCli;
}
return remote;
};
/**
 * Throw the error directly here.
 * We can only defer this check until now, because before decryption we don't know whether it's a file or a folder.
@ -265,62 +100,49 @@ const ensureMTimeOfRemoteEntityValid = (remote: Entity) => {
return remote;
};
/**
 * In place; no extra copy is made.
*/
const encryptLocalEntityInplace = async (
local: Entity,
cipher: Cipher,
remoteKeyEnc: string | undefined
const isInsideObsFolder = (x: string, configDir: string) => {
if (!configDir.startsWith(".")) {
throw Error(`configDir should start with . but we got ${configDir}`);
}
return x === configDir || x.startsWith(`${configDir}/`);
};
const isSkipItemByName = (
key: string,
syncConfigDir: boolean,
syncUnderscoreItems: boolean,
configDir: string,
ignorePaths: string[]
) => {
// console.debug(
// `encryptLocalEntityInplace: local=${JSON.stringify(
// local,
// null,
// 2
// )}, password=${
// password === undefined || password === "" ? "[empty]" : "[not empty]"
// }, remoteKeyEnc=${remoteKeyEnc}`
// );
if (local.key === undefined) {
// local.key should always have value
throw Error(`local ${local.keyRaw} is abnormal without key`);
if (key === undefined) {
throw Error(`isSkipItemByName received an undefined key!`);
}
if (cipher.isPasswordEmpty()) {
local.sizeEnc = local.sizeRaw; // if no enc, the remote file has the same size
local.keyEnc = local.keyRaw;
return local;
}
// below is for having password
if (local.sizeEnc === undefined && local.size !== undefined) {
// it's not filled yet, we fill it
// local.size is possibly undefined if it's "prevSync" Entity
// but local.key should always have value
local.sizeEnc = cipher.getSizeFromOrigToEnc(local.size);
}
if (local.keyEnc === undefined || local.keyEnc === "") {
if (
remoteKeyEnc !== undefined &&
remoteKeyEnc !== "" &&
remoteKeyEnc !== local.key
) {
// we can reuse remote encrypted key if any
local.keyEnc = remoteKeyEnc;
} else {
// we assign a new encrypted key because of no remote
local.keyEnc = await cipher.encryptName(local.key);
if (ignorePaths !== undefined && ignorePaths.length > 0) {
for (const r of ignorePaths) {
if (XRegExp(r, "A").test(key)) {
return true;
}
}
}
return local;
if (syncConfigDir && isInsideObsFolder(key, configDir)) {
return false;
}
if (isSpecialFolderNameToSkip(key, [])) {
// some special dirs and files are always skipped
return true;
}
return (
isHiddenPath(key, true, false) ||
(!syncUnderscoreItems && isHiddenPath(key, false, true)) ||
key === "/" ||
key === DEFAULT_FILE_NAME_FOR_METADATAONREMOTE ||
key === DEFAULT_FILE_NAME_FOR_METADATAONREMOTE2
);
};
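Each entry of ignorePaths is treated as a regular expression tested against the whole key (XRegExp with the astral flag), so, as a hypothetical example:
// A key under "private/" is skipped by the pattern "^private/".
isSkipItemByName(
  "private/secret.md",
  false /* syncConfigDir */,
  false /* syncUnderscoreItems */,
  ".obsidian",
  ["^private/"]
); // => true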
export type SyncPlanType = Record<string, MixedEntity>;
export const ensembleMixedEnties = async (
const ensembleMixedEnties = async (
localEntityList: Entity[],
prevSyncEntityList: Entity[],
remoteEntityList: Entity[],
@ -329,7 +151,7 @@ export const ensembleMixedEnties = async (
configDir: string,
syncUnderscoreItems: boolean,
ignorePaths: string[],
cipher: Cipher,
fsEncrypt: FakeFsEncrypt,
serviceType: SUPPORTED_SERVICES_TYPE,
profiler: Profiler
@ -345,10 +167,7 @@ export const ensembleMixedEnties = async (
// we also have to synthesize folders here
for (const remote of remoteEntityList) {
const remoteCopied = ensureMTimeOfRemoteEntityValid(
await decryptRemoteEntityInplace(
copyEntityAndFixTimeFormat(remote, serviceType),
cipher
)
copyEntityAndFixTimeFormat(remote, serviceType)
);
const key = remoteCopied.key!;
@ -433,19 +252,13 @@ export const ensembleMixedEnties = async (
continue;
}
// TODO: abstraction leaking?
const prevSyncCopied = await fsEncrypt.encryptEntity(
copyEntityAndFixTimeFormat(prevSync, serviceType)
);
if (finalMappings.hasOwnProperty(key)) {
const prevSyncCopied = await encryptLocalEntityInplace(
copyEntityAndFixTimeFormat(prevSync, serviceType),
cipher,
finalMappings[key].remote?.keyEnc
);
finalMappings[key].prevSync = prevSyncCopied;
} else {
const prevSyncCopied = await encryptLocalEntityInplace(
copyEntityAndFixTimeFormat(prevSync, serviceType),
cipher,
undefined
);
finalMappings[key] = {
key: key,
prevSync: prevSyncCopied,
@ -473,19 +286,13 @@ export const ensembleMixedEnties = async (
continue;
}
// TODO: abstraction leaking?
const localCopied = await fsEncrypt.encryptEntity(
copyEntityAndFixTimeFormat(local, serviceType)
);
if (finalMappings.hasOwnProperty(key)) {
const localCopied = await encryptLocalEntityInplace(
copyEntityAndFixTimeFormat(local, serviceType),
cipher,
finalMappings[key].remote?.keyEnc
);
finalMappings[key].local = localCopied;
} else {
const localCopied = await encryptLocalEntityInplace(
copyEntityAndFixTimeFormat(local, serviceType),
cipher,
undefined
);
finalMappings[key] = {
key: key,
local: localCopied,
@ -508,7 +315,7 @@ export const ensembleMixedEnties = async (
* Basically follow the sync algorithm of https://github.com/Jwink3101/syncrclone
* Also deal with syncDirection which makes it more complicated
*/
export const getSyncPlanInplace = async (
const getSyncPlanInplace = async (
mixedEntityMappings: Record<string, MixedEntity>,
howToCleanEmptyFolder: EmptyFolderCleanType,
skipSizeLargerThan: number,
@ -940,6 +747,7 @@ export const getSyncPlanInplace = async (
mixedEntityMappings["/$@meta"] = {
key: "/$@meta", // don't mess up with the types
sideNotes: {
version: "2024047 fs version",
generateTime: currTime,
generateTimeFmt: currTimeFmt,
},
@ -1093,16 +901,96 @@ const splitFourStepsOnEntityMappings = (
};
};
const fullfillMTimeOfRemoteEntityInplace = (
remote: Entity,
mtimeCli?: number
) => {
// TODO:
// on 20240405, we find that dropbox's mtimeCli is not updated
// if the content is not updated even the time is updated...
// so we do not check remote.mtimeCli for now..
if (
mtimeCli !== undefined &&
mtimeCli > 0 /* &&
(remote.mtimeCli === undefined ||
remote.mtimeCli <= 0 ||
(remote.mtimeSvr !== undefined &&
remote.mtimeSvr > 0 &&
remote.mtimeCli >= remote.mtimeSvr))
*/
) {
remote.mtimeCli = mtimeCli;
}
return remote;
};
async function copyFolder(
key: string,
left: FakeFs,
right: FakeFs
): Promise<Entity> {
if (!key.endsWith("/")) {
throw Error(`should not call copyFolder on ${key}`);
}
const statsLeft = await left.stat(key);
return await right.mkdir(key, statsLeft.mtimeCli);
}
async function copyFile(
key: string,
left: FakeFs,
right: FakeFs
): Promise<Entity> {
// console.debug(`copyFile: key=${key}, left=${left.kind}, right=${right.kind}`);
if (key.endsWith("/")) {
throw Error(`should not call copyFile on ${key}`);
}
const statsLeft = await left.stat(key);
const content = await left.readFile(key);
if (statsLeft.size === undefined) {
statsLeft.size = content.byteLength;
} else {
if (statsLeft.size !== content.byteLength) {
throw Error(
`error copying ${left.kind}=>${right.kind}: sizes do not match`
);
}
}
if (statsLeft.mtimeCli === undefined) {
throw Error(`error copying ${left.kind}=>${right.kind}, no mtimeCli`);
}
// console.debug(`copyFile: about to start right.writeFile`);
return await right.writeFile(
key,
content,
statsLeft.mtimeCli,
statsLeft.mtimeCli /* TODO */
);
}
async function copyFileOrFolder(
key: string,
left: FakeFs,
right: FakeFs
): Promise<Entity> {
if (key.endsWith("/")) {
return await copyFolder(key, left, right);
} else {
return await copyFile(key, left, right);
}
}
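Because both ends implement the same FakeFs interface, the same helper moves data in either direction; a sketch, assuming fsLocal and fsEncrypt are already constructed:
// Upload: local -> encrypted remote.
const pushed = await copyFileOrFolder("notes/a.md", fsLocal, fsEncrypt);
// Download: encrypted remote -> local.
const pulled = await copyFileOrFolder("notes/a.md", fsEncrypt, fsLocal);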
const dispatchOperationToActualV3 = async (
key: string,
vaultRandomID: string,
profileID: string,
r: MixedEntity,
client: RemoteClient,
db: InternalDBs,
vault: Vault,
localDeleteFunc: any,
cipher: Cipher
fsLocal: FakeFs,
fsEncrypt: FakeFsEncrypt,
db: InternalDBs
) => {
// console.debug(
// `inside dispatchOperationToActualV3, key=${key}, r=${JSON.stringify(
@ -1136,11 +1024,8 @@ const dispatchOperationToActualV3 = async (
// if we don't have prevSync, we use remote entity AND local mtime
// as if it is "uploaded"
if (r.remote !== undefined) {
let entity = await decryptRemoteEntityInplace(r.remote, cipher);
entity = await fullfillMTimeOfRemoteEntityInplace(
entity,
r.local?.mtimeCli
);
let entity = r.remote;
entity = fullfillMTimeOfRemoteEntityInplace(entity, r.local?.mtimeCli);
if (entity !== undefined) {
await upsertPrevSyncRecordByVaultAndProfile(
@ -1159,38 +1044,17 @@ const dispatchOperationToActualV3 = async (
r.decision === "conflict_created_then_keep_local" ||
r.decision === "conflict_modified_then_keep_local"
) {
if (
client.serviceType === "onedrive" &&
r.local!.size === 0 &&
cipher.isPasswordEmpty()
) {
// special treatment for empty files for OneDrive
// TODO: it's ugly, any other way?
// special treatment for OneDrive: do nothing, skip the empty file when there is no encryption
// if it's an empty folder, or an encrypted file/folder, it continues to be uploaded.
} else {
// console.debug(`before upload in sync, r=${JSON.stringify(r, null, 2)}`);
const { entity, mtimeCli } = await client.uploadToRemote(
r.key,
vault,
false,
cipher,
r.local!.keyEnc
);
// console.debug(`after uploadToRemote`);
// console.debug(`entity=${JSON.stringify(entity,null,2)}`)
// console.debug(`mtimeCli=${mtimeCli}`)
await decryptRemoteEntityInplace(entity, cipher);
// console.debug(`after dec, entity=${JSON.stringify(entity,null,2)}`)
await fullfillMTimeOfRemoteEntityInplace(entity, mtimeCli);
// console.debug(`after fullfill, entity=${JSON.stringify(entity,null,2)}`)
await upsertPrevSyncRecordByVaultAndProfile(
db,
vaultRandomID,
profileID,
entity
);
}
// console.debug(`before upload in sync, r=${JSON.stringify(r, null, 2)}`);
const mtimeCli = (await fsLocal.stat(r.key)).mtimeCli!;
const entity = await copyFileOrFolder(r.key, fsLocal, fsEncrypt);
fullfillMTimeOfRemoteEntityInplace(entity, mtimeCli);
// console.debug(`after fullfill, entity=${JSON.stringify(entity,null,2)}`)
await upsertPrevSyncRecordByVaultAndProfile(
db,
vaultRandomID,
profileID,
entity
);
} else if (
r.decision === "remote_is_modified_then_pull" ||
r.decision === "remote_is_created_then_pull" ||
@ -1198,14 +1062,11 @@ const dispatchOperationToActualV3 = async (
r.decision === "conflict_modified_then_keep_remote" ||
r.decision === "folder_existed_remote_then_also_create_local"
) {
await mkdirpInVault(r.key, vault);
await client.downloadFromRemote(
r.key,
vault,
r.remote!.mtimeCli!,
cipher,
r.remote!.keyEnc
);
if (r.key.endsWith("/")) {
await fsLocal.mkdir(r.key);
} else {
await copyFile(r.key, fsEncrypt, fsLocal);
}
await upsertPrevSyncRecordByVaultAndProfile(
db,
vaultRandomID,
@ -1214,12 +1075,7 @@ const dispatchOperationToActualV3 = async (
);
} else if (r.decision === "local_is_deleted_thus_also_delete_remote") {
// local is deleted, we need to delete remote now
await client.deleteFromRemote(
r.key,
cipher,
r.remote!.keyEnc,
r.remote!.synthesizedFolder
);
await fsEncrypt.rm(r.key);
await clearPrevSyncRecordByVaultAndProfile(
db,
vaultRandomID,
@ -1228,7 +1084,7 @@ const dispatchOperationToActualV3 = async (
);
} else if (r.decision === "remote_is_deleted_thus_also_delete_local") {
// remote is deleted, we need to delete local now
await localDeleteFunc(r.key);
await fsLocal.rm(r.key);
await clearPrevSyncRecordByVaultAndProfile(
db,
vaultRandomID,
@ -1241,17 +1097,8 @@ const dispatchOperationToActualV3 = async (
) {
throw Error(`${r.decision} not implemented yet: ${JSON.stringify(r)}`);
} else if (r.decision === "folder_to_be_created") {
await mkdirpInVault(r.key, vault);
const { entity, mtimeCli } = await client.uploadToRemote(
r.key,
vault,
false,
cipher,
r.local!.keyEnc
);
// we need to decrypt the key!!!
await decryptRemoteEntityInplace(entity, cipher);
await fullfillMTimeOfRemoteEntityInplace(entity, mtimeCli);
await fsLocal.mkdir(r.key);
const entity = await copyFolder(r.key, fsLocal, fsEncrypt);
await upsertPrevSyncRecordByVaultAndProfile(
db,
vaultRandomID,
@ -1267,18 +1114,13 @@ const dispatchOperationToActualV3 = async (
r.decision === "folder_to_be_deleted_on_both" ||
r.decision === "folder_to_be_deleted_on_local"
) {
await localDeleteFunc(r.key);
await fsLocal.rm(r.key);
}
if (
r.decision === "folder_to_be_deleted_on_both" ||
r.decision === "folder_to_be_deleted_on_remote"
) {
await client.deleteFromRemote(
r.key,
cipher,
r.remote!.keyEnc,
r.remote!.synthesizedFolder
);
await fsEncrypt.rm(r.key);
}
await clearPrevSyncRecordByVaultAndProfile(
db,
@ -1293,18 +1135,16 @@ const dispatchOperationToActualV3 = async (
export const doActualSync = async (
mixedEntityMappings: Record<string, MixedEntity>,
client: RemoteClient,
fsLocal: FakeFs,
fsEncrypt: FakeFsEncrypt,
vaultRandomID: string,
profileID: string,
vault: Vault,
cipher: Cipher,
concurrency: number,
localDeleteFunc: any,
protectModifyPercentage: number,
getProtectModifyPercentageErrorStrFunc: any,
callbackSyncProcess: any,
db: InternalDBs,
profiler: Profiler
profiler: Profiler,
callbackSyncProcess?: any
) => {
profiler.addIndent();
profiler.insert("doActualSync: enter");
@ -1400,27 +1240,23 @@ export const doActualSync = async (
// `start syncing "${key}" with plan ${JSON.stringify(val)}`
// );
if (callbackSyncProcess !== undefined) {
await callbackSyncProcess(
realCounter,
realTotalCount,
key,
val.decision
);
await callbackSyncProcess?.(
realCounter,
realTotalCount,
key,
val.decision
);
realCounter += 1;
}
realCounter += 1;
await dispatchOperationToActualV3(
key,
vaultRandomID,
profileID,
val,
client,
db,
vault,
localDeleteFunc,
cipher
fsLocal,
fsEncrypt,
db
);
// console.debug(`finished ${key}`);
@ -1456,3 +1292,191 @@ export const doActualSync = async (
profiler.insert(`doActualSync: exit`);
profiler.removeIndent();
};
export type SyncStatusType =
| "idle"
| "preparing"
| "getting_remote_files_list"
| "getting_local_meta"
| "getting_local_prev_sync"
| "checking_password"
| "generating_plan"
| "syncing"
| "cleaning"
| "finish";
/**
 * Every input variable should be mockable, so that this function is testable.
*/
export async function syncer(
fsLocal: FakeFs,
fsRemote: FakeFs,
fsEncrypt: FakeFsEncrypt,
profiler: Profiler,
db: InternalDBs,
triggerSource: SyncTriggerSourceType,
profileID: string,
vaultRandomID: string,
configDir: string,
settings: RemotelySavePluginSettings,
getProtectModifyPercentageErrorStrFunc: any,
markIsSyncingFunc: (isSyncing: boolean) => void,
notifyFunc?: (s: SyncTriggerSourceType, step: number) => Promise<any>,
errNotifyFunc?: (s: SyncTriggerSourceType, error: Error) => Promise<any>,
ribboonFunc?: (s: SyncTriggerSourceType, step: number) => Promise<any>,
statusBarFunc?: (s: SyncTriggerSourceType, step: number) => any,
callbackSyncProcess?: any
) {
markIsSyncingFunc(true);
let step = 0; // dry mode only
await notifyFunc?.(triggerSource, step);
step = 1;
await notifyFunc?.(triggerSource, step);
await ribboonFunc?.(triggerSource, step);
await statusBarFunc?.(triggerSource, step);
profiler.insert("start big sync func");
try {
if (fsEncrypt.innerFs !== fsRemote) {
throw Error(`the encrypt layer's inner fs should be the remote fs`);
}
const passwordCheckResult = await fsEncrypt.isPasswordOk();
if (!passwordCheckResult.ok) {
throw Error(passwordCheckResult.reason);
}
await notifyFunc?.(triggerSource, step);
await ribboonFunc?.(triggerSource, step);
await statusBarFunc?.(triggerSource, step);
profiler.insert(
`finish step${step} (list partial remote and check password)`
);
step = 2;
const remoteEntityList = await fsEncrypt.walk();
console.debug(`remoteEntityList:`);
console.debug(remoteEntityList);
await notifyFunc?.(triggerSource, step);
await ribboonFunc?.(triggerSource, step);
await statusBarFunc?.(triggerSource, step);
profiler.insert(`finish step${step} (list remote)`);
step = 3;
const localEntityList = await fsLocal.walk();
console.debug(`localEntityList:`);
console.debug(localEntityList);
await notifyFunc?.(triggerSource, step);
await ribboonFunc?.(triggerSource, step);
await statusBarFunc?.(triggerSource, step);
profiler.insert(`finish step${step} (list local)`);
step = 4;
const prevSyncEntityList = await getAllPrevSyncRecordsByVaultAndProfile(
db,
vaultRandomID,
profileID
);
console.debug(`prevSyncEntityList:`);
console.debug(prevSyncEntityList);
await notifyFunc?.(triggerSource, step);
await ribboonFunc?.(triggerSource, step);
await statusBarFunc?.(triggerSource, step);
profiler.insert(`finish step${step} (prev sync)`);
step = 5;
let mixedEntityMappings = await ensembleMixedEnties(
localEntityList,
prevSyncEntityList,
remoteEntityList,
settings.syncConfigDir ?? false,
configDir,
settings.syncUnderscoreItems ?? false,
settings.ignorePaths ?? [],
fsEncrypt,
settings.serviceType,
profiler
);
await notifyFunc?.(triggerSource, step);
await ribboonFunc?.(triggerSource, step);
await statusBarFunc?.(triggerSource, step);
profiler.insert(`finish step${step} (build partial mixedEntity)`);
step = 6;
mixedEntityMappings = await getSyncPlanInplace(
mixedEntityMappings,
settings.howToCleanEmptyFolder ?? "skip",
settings.skipSizeLargerThan ?? -1,
settings.conflictAction ?? "keep_newer",
settings.syncDirection ?? "bidirectional",
profiler
);
console.info(`mixedEntityMappings:`);
console.info(mixedEntityMappings); // for debugging
await notifyFunc?.(triggerSource, step);
await ribboonFunc?.(triggerSource, step);
await statusBarFunc?.(triggerSource, step);
profiler.insert("finish building full sync plan");
await insertSyncPlanRecordByVault(
db,
mixedEntityMappings,
vaultRandomID,
settings.serviceType
);
await notifyFunc?.(triggerSource, step);
await ribboonFunc?.(triggerSource, step);
await statusBarFunc?.(triggerSource, step);
profiler.insert("finish writing sync plan");
profiler.insert(`finish step${step} (make plan)`);
// The operations above are almost read only and kind of safe.
// The operations below begins to write or delete (!!!) something.
step = 7;
if (triggerSource !== "dry") {
await doActualSync(
mixedEntityMappings,
fsLocal,
fsEncrypt,
vaultRandomID,
profileID,
settings.concurrency ?? 5,
settings.protectModifyPercentage ?? 50,
getProtectModifyPercentageErrorStrFunc,
db,
profiler,
callbackSyncProcess
);
await notifyFunc?.(triggerSource, step);
await ribboonFunc?.(triggerSource, step);
await statusBarFunc?.(triggerSource, step);
profiler.insert(`finish step${step} (actual sync)`);
} else {
await notifyFunc?.(triggerSource, step);
await ribboonFunc?.(triggerSource, step);
await statusBarFunc?.(triggerSource, step);
profiler.insert(
`finish step${step} (skip actual sync because of dry run)`
);
}
} catch (error: any) {
profiler.insert("start error branch");
await errNotifyFunc?.(triggerSource, error as Error);
profiler.insert("finish error branch");
} finally {
}
profiler.insert("finish syncRun");
console.debug(profiler.toString());
await profiler.save(db, vaultRandomID, settings.serviceType);
step = 8;
await notifyFunc?.(triggerSource, step);
await ribboonFunc?.(triggerSource, step);
await statusBarFunc?.(triggerSource, step);
markIsSyncingFunc(false);
}
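Because every dependency is a parameter, a dry run can be exercised entirely with fakes; a test sketch (all the fake* objects are assumptions, not fixtures from this repo):
// Sketch: triggerSource "dry" skips the write/delete phase, so this only
// lists both sides and builds the plan.
await syncer(
  fakeFsLocal,
  fakeFsRemote,
  fakeFsEncrypt,
  fakeProfiler,
  fakeDb,
  "dry",
  "profile-1",
  "vault-random-id",
  ".obsidian",
  fakeSettings,
  (x: number) => `more than ${x}% of the vault would be changed`,
  (isSyncing: boolean) => console.debug(`isSyncing=${isSyncing}`)
);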

View File

@ -119,7 +119,7 @@ export class SyncAlgoV3Modal extends Modal {
this.plugin.saveAgreeToUseNewSyncAlgorithm();
this.plugin.enableAutoSyncIfSet();
this.plugin.enableInitSyncIfSet();
this.plugin.enableSyncOnSaveIfSet();
this.plugin.toggleSyncOnSaveIfSet();
} else {
console.info("do not agree to use the new algorithm");
this.plugin.unload();

View File

@ -68,6 +68,7 @@ module.exports = {
// crypto: false,
// domain: require.resolve("domain-browser"),
// events: require.resolve("events"),
fs: false,
http: false,
// http: require.resolve("stream-http"),
https: false,
@ -87,6 +88,7 @@ module.exports = {
url: require.resolve("url/"),
// util: require.resolve("util"),
// vm: require.resolve("vm-browserify"),
vm: false,
// zlib: require.resolve("browserify-zlib"),
},
},