use loglevel everywhere

fyears 2022-01-05 00:10:55 +08:00
parent 3b195f33c8
commit 8671ba4660
16 changed files with 157 additions and 112 deletions
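Throughout the diff the pattern is the same: each module imports `loglevel`, obtains a shared named logger, and routes its former `console.log` calls through it, while the new `currLogLevel` setting drives verbosity at load time. A minimal standalone sketch of that pattern, assuming only loglevel's standard API (this snippet is illustrative and is not one of the changed files):

```typescript
import * as origLog from "loglevel";

// Every module asks for the same named logger, so a single setLevel()
// call adjusts verbosity for the whole plugin.
const log = origLog.getLogger("rs-default");

// In the plugin this value comes from settings.currLogLevel ("info" by default).
log.setLevel("info");

log.info("db connected");       // printed: "info" is at or above the current level
log.debug("sync plan details"); // suppressed unless the level is "debug" or "trace"
```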

View File

@@ -12,64 +12,64 @@ No professional designers are here. Thus the following steps involve many progra
1. use excalidraw and export png and svg.
   ```bash
   # results
   logo.excalidraw
   logo.png
   logo.svg
   ```
2. manually edit the `logo.svg` and make background transparent.
   ```bash
   # results
   logo-transparent.svg
   ```
3. use python library [`svgutils`](https://github.com/btel/svg_utils) to make a strictly square figure. The [doc](https://svgutils.readthedocs.io/en/latest/tutorials/composing_multipanel_figures.html) is very useful.
   ```python
   from svgutils.compose import *

   def get_standard_300x300(file_name):
       fig = Figure(300, 300,
           Panel(
               SVG(file_name),
           ).move(-3, 12),
       )
       return fig

   get_standard_300x300('logo-transparent.svg').save('300x300.svg')

   # def get_other_size_from_standard(file_name, px):
   #     fig = Figure(px, px,
   #         Panel(
   #             SVG(file_name).scale(px/300.0),
   #         ).move(-3*px/300.0, 12*px/300.0),
   #     )
   #     return fig
   # get_other_size_from_standard('logo.svg',256).save('256x256.svg')
   ```
   ```bash
   # results
   300x300.svg
   ```
4. use `inkscape` command line to get different sizes' `.png` files.
   ```bash
   inkscape 300x300.svg -o 300x300.png
   inkscape 300x300.svg -o 50x50.png -w 50 -h 50
   inkscape 300x300.svg -o 64x64.png -w 64 -h 64
   inkscape 300x300.svg -o 256x256.png -w 256 -h 256
   ```
   ```bash
   # results
   50x50.png
   64x64.png
   256x256.png
   ```

View File

@@ -62,6 +62,7 @@
    "dropbox": "^10.22.0",
    "localforage": "^1.10.0",
    "lodash": "^4.17.21",
+   "loglevel": "^1.8.0",
    "mime-types": "^2.1.33",
    "obsidian": "^0.12.0",
    "path-browserify": "^1.0.1",

View File

@@ -52,6 +52,7 @@ export interface RemotelySavePluginSettings {
  onedrive: OnedriveConfig;
  password: string;
  serviceType: SUPPORTED_SERVICES_TYPE;
+  currLogLevel?: string;
}

export interface RemoteItem {

View File

@@ -5,11 +5,14 @@ import { readAllSyncPlanRecordTexts } from "./localdb";
import type { InternalDBs } from "./localdb";
import { mkdirpInVault } from "./misc";

+import * as origLog from "loglevel";
+const log = origLog.getLogger("rs-default");

const DEFAULT_DEBUG_FOLDER = "_debug_remotely_save/";
const DEFAULT_SYNC_PLANS_HISTORY_FILE_PREFIX = "sync_plans_hist_exported_on_";

export const exportSyncPlansToFiles = async (db: InternalDBs, vault: Vault) => {
-  console.log("exporting");
+  log.info("exporting");
  await mkdirpInVault(DEFAULT_DEBUG_FOLDER, vault);
  const records = await readAllSyncPlanRecordTexts(db);
  let md = "";
@@ -25,5 +28,5 @@ export const exportSyncPlansToFiles = async (db: InternalDBs, vault: Vault) => {
  await vault.create(filePath, md, {
    mtime: ts,
  });
-  console.log("finish exporting");
+  log.info("finish exporting");
};

View File

@@ -1,6 +1,9 @@
import { base32, base64url } from "rfc4648";
import { bufferToArrayBuffer, hexStringToTypedArray } from "./misc";

+import * as origLog from "loglevel";
+const log = origLog.getLogger("rs-default");

const DEFAULT_ITER = 20000;

// base32.stringify(Buffer.from('Salted__'))

View File

@@ -7,6 +7,9 @@ import {
  RemotelySavePluginSettings,
} from "./baseTypes";

+import * as origLog from "loglevel";
+const log = origLog.getLogger("rs-default");

export const exportQrCodeUri = async (
  settings: RemotelySavePluginSettings,
  currentVaultName: string,
@@ -19,7 +22,7 @@ export const exportQrCodeUri = async (
  const vault = encodeURIComponent(currentVaultName);
  const version = encodeURIComponent(pluginVersion);
  const rawUri = `obsidian://${COMMAND_URI}?func=settings&version=${version}&vault=${vault}&data=${data}`;
-  // console.log(uri)
+  // log.info(uri)
  const imgUri = await QRCode.toDataURL(rawUri);
  return {
    rawUri,

View File

@@ -6,6 +6,9 @@ import type { SyncPlanType } from "./sync";
export type LocalForage = typeof localforage;

+import * as origLog from "loglevel";
+const log = origLog.getLogger("rs-default");

export const DEFAULT_DB_VERSION_NUMBER: number = 20211114;
export const DEFAULT_DB_NAME = "remotelysavedb";
export const DEFAULT_TBL_VERSION = "schemaversion";
@@ -78,7 +81,7 @@ export const prepareDBs = async () => {
    await migrateDBs(db, originalVersion, DEFAULT_DB_VERSION_NUMBER);
  }
-  console.log("db connected");
+  log.info("db connected");
  return db;
};
@@ -86,10 +89,10 @@ export const destroyDBs = async () => {
  // await localforage.dropInstance({
  //   name: DEFAULT_DB_NAME,
  // });
-  // console.log("db deleted");
+  // log.info("db deleted");
  const req = indexedDB.deleteDatabase(DEFAULT_DB_NAME);
  req.onsuccess = (event) => {
-    console.log("db deleted");
+    log.info("db deleted");
  };
  req.onblocked = (event) => {
    console.warn("trying to delete db but it was blocked");
@@ -128,7 +131,7 @@ export const insertDeleteRecord = async (
  db: InternalDBs,
  fileOrFolder: TAbstractFile
) => {
-  // console.log(fileOrFolder);
+  // log.info(fileOrFolder);
  let k: FileFolderHistoryRecord;
  if (fileOrFolder instanceof TFile) {
    k = {
@@ -165,7 +168,7 @@ export const insertRenameRecord = async (
  fileOrFolder: TAbstractFile,
  oldPath: string
) => {
-  // console.log(fileOrFolder);
+  // log.info(fileOrFolder);
  let k: FileFolderHistoryRecord;
  if (fileOrFolder instanceof TFile) {
    k = {

View File

@@ -35,6 +35,9 @@ import { RemotelySaveSettingTab } from "./settings";
import type { SyncStatusType } from "./sync";
import { doActualSync, getSyncPlan, isPasswordOk } from "./sync";

+import * as origLog from "loglevel";
+const log = origLog.getLogger("rs-default");

const DEFAULT_SETTINGS: RemotelySavePluginSettings = {
  s3: DEFAULT_S3_CONFIG,
  webdav: DEFAULT_WEBDAV_CONFIG,
@@ -42,6 +45,7 @@ const DEFAULT_SETTINGS: RemotelySavePluginSettings = {
  onedrive: DEFAULT_ONEDRIVE_CONFIG,
  password: "",
  serviceType: "s3",
+  currLogLevel: "info",
};

interface OAuth2Info {
@@ -58,9 +62,10 @@ export default class RemotelySavePlugin extends Plugin {
  db: InternalDBs;
  syncStatus: SyncStatusType;
  oauth2Info: OAuth2Info;
+  currLogLevel: string;

  async onload() {
-    console.log(`loading plugin ${this.manifest.id}`);
+    log.info(`loading plugin ${this.manifest.id}`);

    this.oauth2Info = {
      verifier: "",
@@ -71,6 +76,11 @@ export default class RemotelySavePlugin extends Plugin {
    }; // init

    await this.loadSettings();

+    if (this.settings.currLogLevel !== undefined) {
+      log.setLevel(this.settings.currLogLevel as any);
+    }

    await this.checkIfOauthExpires();
    await this.prepareDB();
@@ -275,7 +285,7 @@ export default class RemotelySavePlugin extends Plugin {
      }

      try {
-        //console.log(`huh ${this.settings.password}`)
+        //log.info(`huh ${this.settings.password}`)
        new Notice(
          `1/7 Remotely Save Sync Preparing (${this.settings.serviceType})`
        );
@@ -294,14 +304,14 @@ export default class RemotelySavePlugin extends Plugin {
          () => self.saveSettings()
        );
        const remoteRsp = await client.listFromRemote();
-        // console.log(remoteRsp);
+        // log.info(remoteRsp);

        new Notice("3/7 Starting to fetch local meta data.");
        this.syncStatus = "getting_local_meta";
        const local = this.app.vault.getAllLoadedFiles();
        const localHistory = await loadDeleteRenameHistoryTable(this.db);
-        // console.log(local);
-        // console.log(localHistory);
+        // log.info(local);
+        // log.info(localHistory);

        new Notice("4/7 Checking password correct or not.");
        this.syncStatus = "checking_password";
@@ -324,7 +334,7 @@ export default class RemotelySavePlugin extends Plugin {
          client.serviceType,
          this.settings.password
        );
-        console.log(syncPlan.mixedStates); // for debugging
+        log.info(syncPlan.mixedStates); // for debugging
        await insertSyncPlanRecord(this.db, syncPlan);

        // The operations above are read only and kind of safe.
@@ -346,8 +356,8 @@ export default class RemotelySavePlugin extends Plugin {
        this.syncStatus = "idle";
      } catch (error) {
        const msg = `Remotely Save error while ${this.syncStatus}`;
-        console.log(msg);
-        console.log(error);
+        log.info(msg);
+        log.info(error);
        new Notice(msg);
        new Notice(error.message);
        this.syncStatus = "idle";
@@ -357,16 +367,16 @@ export default class RemotelySavePlugin extends Plugin {
    this.addSettingTab(new RemotelySaveSettingTab(this.app, this));

    // this.registerDomEvent(document, "click", (evt: MouseEvent) => {
-    //   console.log("click", evt);
+    //   log.info("click", evt);
    // });
    // this.registerInterval(
-    //   window.setInterval(() => console.log("setInterval"), 5 * 60 * 1000)
+    //   window.setInterval(() => log.info("setInterval"), 5 * 60 * 1000)
    // );
  }

  onunload() {
-    console.log(`unloading plugin ${this.manifest.id}`);
+    log.info(`unloading plugin ${this.manifest.id}`);
    this.destroyDBs();
  }

View File

@@ -4,6 +4,9 @@ import * as path from "path";
import { base32, base64url } from "rfc4648";
import XRegExp from "xregexp";

+import * as origLog from "loglevel";
+const log = origLog.getLogger("rs-default");

/**
 * If any part of the file starts with '.' or '_' then it's a hidden file.
 * @param item
@@ -13,7 +16,7 @@ import XRegExp from "xregexp";
export const isHiddenPath = (item: string, loose: boolean = true) => {
  const k = path.posix.normalize(item); // TODO: only unix path now
  const k2 = k.split("/"); // TODO: only unix path now
-  // console.log(k2)
+  // log.info(k2)
  for (const singlePart of k2) {
    if (singlePart === "." || singlePart === ".." || singlePart === "") {
      continue;
@@ -54,14 +57,14 @@ export const getFolderLevels = (x: string) => {
};

export const mkdirpInVault = async (thePath: string, vault: Vault) => {
-  // console.log(thePath);
+  // log.info(thePath);
  const foldersToBuild = getFolderLevels(thePath);
-  // console.log(foldersToBuild);
+  // log.info(foldersToBuild);
  for (const folder of foldersToBuild) {
    const r = await vault.adapter.exists(folder);
-    // console.log(r);
+    // log.info(r);
    if (!r) {
-      console.log(`mkdir ${folder}`);
+      log.info(`mkdir ${folder}`);
      await vault.adapter.mkdir(folder);
    }
  }

View File

@@ -11,6 +11,9 @@ import * as onedrive from "./remoteForOnedrive";
import * as s3 from "./remoteForS3";
import * as webdav from "./remoteForWebdav";

+import * as origLog from "loglevel";
+const log = origLog.getLogger("rs-default");

export class RemoteClient {
  readonly serviceType: SUPPORTED_SERVICES_TYPE;
  readonly s3Client?: s3.S3Client;

View File

@@ -12,6 +12,9 @@ import { bufferToArrayBuffer, getFolderLevels, mkdirpInVault } from "./misc";
export { Dropbox } from "dropbox";

+import * as origLog from "loglevel";
+const log = origLog.getLogger("rs-default");

export const DEFAULT_DROPBOX_CONFIG: DropboxConfig = {
  accessToken: "",
  clientID: process.env.DEFAULT_DROPBOX_APP_KEY,
@@ -208,7 +211,7 @@ export const sendRefreshTokenReq = async (
  appKey: string,
  refreshToken: string
) => {
-  console.log("start auto getting refreshed Dropbox access token.");
+  log.info("start auto getting refreshed Dropbox access token.");
  const resp1 = await fetch("https://api.dropboxapi.com/oauth2/token", {
    method: "POST",
    body: new URLSearchParams({
@@ -218,7 +221,7 @@ export const sendRefreshTokenReq = async (
    }),
  });
  const resp2 = (await resp1.json()) as DropboxSuccessAuthRes;
-  console.log("finish auto getting refreshed Dropbox access token.");
+  log.info("finish auto getting refreshed Dropbox access token.");
  return resp2;
};
@@ -227,7 +230,7 @@ export const setConfigBySuccessfullAuthInplace = async (
  authRes: DropboxSuccessAuthRes,
  saveUpdatedConfigFunc: () => Promise<any> | undefined
) => {
-  console.log("start updating local info of Dropbox token");
+  log.info("start updating local info of Dropbox token");
  config.accessToken = authRes.access_token;
  config.accessTokenExpiresInSeconds = parseInt(authRes.expires_in);
@@ -249,7 +252,7 @@ export const setConfigBySuccessfullAuthInplace = async (
    await saveUpdatedConfigFunc();
  }

-  console.log("finish updating local info of Dropbox token");
+  log.info("finish updating local info of Dropbox token");
};

////////////////////////////////////////////////////////////////////////////////
@@ -308,9 +311,9 @@ export class WrappedDropboxClient {
    }

    // check vault folder
-    // console.log(`checking remote has folder /${this.vaultName}`);
+    // log.info(`checking remote has folder /${this.vaultName}`);
    if (this.vaultFolderExists) {
-      // console.log(`already checked, /${this.vaultName} exist before`)
+      // log.info(`already checked, /${this.vaultName} exist before`)
    } else {
      const res = await this.dropbox.filesListFolder({
        path: "",
@@ -323,14 +326,14 @@ export class WrappedDropboxClient {
        }
      }
      if (!this.vaultFolderExists) {
-        console.log(`remote does not have folder /${this.vaultName}`);
+        log.info(`remote does not have folder /${this.vaultName}`);
        await this.dropbox.filesCreateFolderV2({
          path: `/${this.vaultName}`,
        });
-        console.log(`remote folder /${this.vaultName} created`);
+        log.info(`remote folder /${this.vaultName} created`);
        this.vaultFolderExists = true;
      } else {
-        // console.log(`remote folder /${this.vaultName} exists`);
+        // log.info(`remote folder /${this.vaultName} exists`);
      }
    }
@@ -489,7 +492,7 @@ export const listFromRemote = async (
  if (res.status !== 200) {
    throw Error(JSON.stringify(res));
  }
-  // console.log(res);
+  // log.info(res);
  const contents = res.result.entries;
  const unifiedContents = contents

View File

@@ -28,6 +28,9 @@ import {
  mkdirpInVault,
} from "./misc";

+import * as origLog from "loglevel";
+const log = origLog.getLogger("rs-default");

const SCOPES = ["User.Read", "Files.ReadWrite.AppFolder", "offline_access"];
const REDIRECT_URI = `obsidian://${COMMAND_CALLBACK_ONEDRIVE}`;
@@ -117,8 +120,8 @@ export const sendAuthReq = async (
  //   code: authCode,
  //   codeVerifier: verifier, // PKCE Code Verifier
  // });
-  // console.log('authResponse')
-  // console.log(authResponse)
+  // log.info('authResponse')
+  // log.info(authResponse)
  // return authResponse;

  // Because of the CORS problem,
@@ -142,7 +145,7 @@ export const sendAuthReq = async (
  });
  const rsp2 = JSON.parse(rsp1);
-  // console.log(rsp2);
+  // log.info(rsp2);

  if (rsp2.error !== undefined) {
    return rsp2 as AccessCodeResponseFailedType;
@@ -171,7 +174,7 @@ export const sendRefreshTokenReq = async (
  });
  const rsp2 = JSON.parse(rsp1);
-  // console.log(rsp2);
+  // log.info(rsp2);

  if (rsp2.error !== undefined) {
    return rsp2 as AccessCodeResponseFailedType;
@@ -185,7 +188,7 @@ export const setConfigBySuccessfullAuthInplace = async (
  authRes: AccessCodeResponseSuccessfulType,
  saveUpdatedConfigFunc: () => Promise<any> | undefined
) => {
-  console.log("start updating local info of OneDrive token");
+  log.info("start updating local info of OneDrive token");
  config.accessToken = authRes.access_token;
  config.accessTokenExpiresAtTime =
    Date.now() + authRes.expires_in - 5 * 60 * 1000;
@@ -200,7 +203,7 @@ export const setConfigBySuccessfullAuthInplace = async (
    await saveUpdatedConfigFunc();
  }

-  console.log("finish updating local info of Onedrive token");
+  log.info("finish updating local info of Onedrive token");
};

////////////////////////////////////////////////////////////////////////////////
@@ -352,7 +355,7 @@ class MyAuthProvider implements AuthenticationProvider {
      this.onedriveConfig.accessTokenExpiresAtTime =
        currentTs + r2.expires_in * 1000 - 60 * 2 * 1000;
      await this.saveUpdatedConfigFunc();
-      console.log("Onedrive accessToken updated");
+      log.info("Onedrive accessToken updated");
      return this.onedriveConfig.accessToken;
    }
  };
@@ -388,26 +391,26 @@ export class WrappedOnedriveClient {
    }

    // check vault folder
-    // console.log(`checking remote has folder /${this.vaultName}`);
+    // log.info(`checking remote has folder /${this.vaultName}`);
    if (this.vaultFolderExists) {
-      // console.log(`already checked, /${this.vaultName} exist before`)
+      // log.info(`already checked, /${this.vaultName} exist before`)
    } else {
      const k = await this.client.api("/drive/special/approot/children").get();
-      // console.log(k);
+      // log.info(k);
      this.vaultFolderExists =
        (k.value as DriveItem[]).filter((x) => x.name === this.vaultName)
          .length > 0;
      if (!this.vaultFolderExists) {
-        console.log(`remote does not have folder /${this.vaultName}`);
+        log.info(`remote does not have folder /${this.vaultName}`);
        await this.client.api("/drive/special/approot/children").post({
          name: `${this.vaultName}`,
          folder: {},
          "@microsoft.graph.conflictBehavior": "replace",
        });
-        console.log(`remote folder /${this.vaultName} created`);
+        log.info(`remote folder /${this.vaultName} created`);
        this.vaultFolderExists = true;
      } else {
-        // console.log(`remote folder /${this.vaultName} exists`);
+        // log.info(`remote folder /${this.vaultName} exists`);
      }
    }
  };
@@ -479,15 +482,15 @@ export const getRemoteMeta = async (
) => {
  await client.init();
  const remotePath = getOnedrivePath(fileOrFolderPath, client.vaultName);
-  // console.log(`remotePath=${remotePath}`);
+  // log.info(`remotePath=${remotePath}`);
  const rsp = await client.client
    .api(remotePath)
    .select("cTag,eTag,fileSystemInfo,folder,file,name,parentReference,size")
    .get();
-  // console.log(rsp);
+  // log.info(rsp);
  const driveItem = rsp as DriveItem;
  const res = fromDriveItemToRemoteItem(driveItem, client.vaultName);
-  // console.log(res);
+  // log.info(res);
  return res;
};
@@ -507,7 +510,7 @@ export const uploadToRemote = async (
    uploadFile = remoteEncryptedKey;
  }
  uploadFile = getOnedrivePath(uploadFile, client.vaultName);
-  // console.log(`uploadFile=${uploadFile}`);
+  // log.info(`uploadFile=${uploadFile}`);

  const isFolder = fileOrFolderPath.endsWith("/");
@@ -567,7 +570,7 @@ export const uploadToRemote = async (
        uploadEventHandlers: {
          progress: (range?: Range) => {
            // Handle progress event
-            // console.log(
+            // log.info(
            //   `uploading ${range.minValue}-${range.maxValue} of ${fileOrFolderPath}`
            // );
          },
@@ -575,7 +578,7 @@ export const uploadToRemote = async (
      } as LargeFileUploadTaskOptions
    );
    const uploadResult: UploadResult = await task.upload();
-    // console.log(uploadResult)
+    // log.info(uploadResult)
    const res = await getRemoteMeta(client, uploadFile);
    return res;
  }
@@ -594,7 +597,7 @@ export const uploadToRemote = async (
    // so use LargeFileUploadTask instead of OneDriveLargeFileUploadTask
    const progress = (range?: Range) => {
      // Handle progress event
-      // console.log(
+      // log.info(
      //   `uploading ${range.minValue}-${range.maxValue} of ${fileOrFolderPath}`
      // );
    };
@@ -631,7 +634,7 @@ export const uploadToRemote = async (
      options
    );
    const uploadResult: UploadResult = await task.upload();
-    // console.log(uploadResult)
+    // log.info(uploadResult)
    const res = await getRemoteMeta(client, uploadFile);
    return res;
  }

View File

@@ -25,6 +25,9 @@ import {
export { S3Client } from "@aws-sdk/client-s3";

+import * as origLog from "loglevel";
+const log = origLog.getLogger("rs-default");

export const DEFAULT_S3_CONFIG = {
  s3Endpoint: "",
  s3Region: "",
@@ -150,7 +153,7 @@ export const uploadToRemote = async (
    },
  });
  upload.on("httpUploadProgress", (progress) => {
-    // console.log(progress);
+    // log.info(progress);
  });
  await upload.done();

View File

@@ -7,6 +7,9 @@ import { decryptArrayBuffer, encryptArrayBuffer } from "./encrypt";
import { bufferToArrayBuffer, getPathFolder, mkdirpInVault } from "./misc";

export type { WebDAVClient } from "webdav/web";

+import * as origLog from "loglevel";
+const log = origLog.getLogger("rs-default");

export const DEFAULT_WEBDAV_CONFIG = {
  address: "",
  username: "",
@@ -82,7 +85,7 @@ export class WrappedWebdavClient {
          : AuthType.Password,
      });
    } else {
-      console.log("no password");
+      log.info("no password");
      this.client = createClient(this.webdavConfig.address);
    }
  }
@@ -93,12 +96,12 @@ export class WrappedWebdavClient {
    } else {
      const res = await this.client.exists(`/${this.vaultName}`);
      if (res) {
-        // console.log("remote vault folder exits!");
+        // log.info("remote vault folder exits!");
        this.vaultFolderExists = true;
      } else {
-        console.log("remote vault folder not exists, creating");
+        log.info("remote vault folder not exists, creating");
        await this.client.createDirectory(`/${this.vaultName}`);
-        console.log("remote vault folder created!");
+        log.info("remote vault folder created!");
        this.vaultFolderExists = true;
      }
    }
@@ -118,7 +121,7 @@ export const getRemoteMeta = async (
) => {
  await client.init();
  const remotePath = getWebdavPath(fileOrFolderPath, client.vaultName);
-  // console.log(`remotePath = ${remotePath}`);
+  // log.info(`remotePath = ${remotePath}`);
  const res = (await client.client.stat(remotePath, {
    details: false,
  })) as FileStat;
@@ -158,7 +161,7 @@ export const uploadToRemote = async (
    await client.client.putFileContents(uploadFile, "", {
      overwrite: true,
      onUploadProgress: (progress) => {
-        // console.log(`Uploaded ${progress.loaded} bytes of ${progress.total}`);
+        // log.info(`Uploaded ${progress.loaded} bytes of ${progress.total}`);
      },
    });
@@ -180,7 +183,7 @@ export const uploadToRemote = async (
    await client.client.putFileContents(uploadFile, remoteContent, {
      overwrite: true,
      onUploadProgress: (progress) => {
-        console.log(`Uploaded ${progress.loaded} bytes of ${progress.total}`);
+        log.info(`Uploaded ${progress.loaded} bytes of ${progress.total}`);
      },
    });
@@ -282,10 +285,10 @@ export const deleteFromRemote = async (
  await client.init();
  try {
    await client.client.deleteFile(remoteFileName);
-    // console.log(`delete ${remoteFileName} succeeded`);
+    // log.info(`delete ${remoteFileName} succeeded`);
  } catch (err) {
    console.error("some error while deleting");
-    console.log(err);
+    log.info(err);
  }
};

View File

@@ -20,6 +20,9 @@ import {
  getAuthUrlAndVerifier as getAuthUrlAndVerifierOnedrive,
} from "./remoteForOnedrive";

+import * as origLog from 'loglevel';
+const log = origLog.getLogger('rs-default');

class PasswordModal extends Modal {
  plugin: RemotelySavePlugin;
  newPassword: string;

View File

@@ -582,7 +582,7 @@ export const doActualSync = async (
      password,
      foldersCreatedBefore
    );
-    // console.log(`finished ${k}, with ${setToString(foldersCreatedBefore)}`);
+    // log.info(`finished ${k}, with ${setToString(foldersCreatedBefore)}`);
  }
  // await Promise.all(
  //   Object.entries(keyStates)