use raw logging now!

fyears 2024-03-17 16:03:40 +08:00
parent 996533328e
commit d7ff793715
23 changed files with 196 additions and 422 deletions
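
The change is mechanical: the `loglevel`-backed wrapper exported from `moreOnLog.ts` (deleted below) goes away, every `import { log } from "./moreOnLog";` is dropped, and call sites use the browser console directly. A minimal sketch of the pattern, with messages taken from the diff itself:

```ts
// Before: every module imported a shared loglevel-backed wrapper.
// import { log } from "./moreOnLog";
// log.info("db connected");
// log.debug("the messy config is null or undefined, skip");

// After: plain console methods, no wrapper module and no extra import.
console.info("db connected");
console.debug("the messy config is null or undefined, skip");
console.error("tried to delete db but something goes wrong!");
```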

View File

@ -12,8 +12,8 @@ See [here](./export_sync_plans.md).
See [here](./check_console_output.md).
## Advanced: Save Console Output Then Read Them Later
## Advanced: Use `Logstravaganza` To Export Logs
This method works for desktop and mobile devices (iOS, Android).
This method works for desktop and mobile devices (iOS, Android), and is especially useful on iOS.
See [here](./save_console_output_and_export.md).
See [here](./use_logstravaganza.md).

View File

@ -1,25 +0,0 @@
# Save Console Output And Read Them Later
## Disable Auto Sync First
You should disable auto sync to avoid any unexpected sync runs.
## Set The Output Level To Debug
Go to the plugin settings, scroll down to the section "Debug" -> "alter console log level", and change it from "info" to "debug".
## Enable Saving The Output To DB
Go to the plugin settings, scroll down to the section "Debug" -> "Save Console Logs Into DB", and change it from "disable" to "enable". **This setting has some performance cost, so do NOT keep it enabled when it is not necessary!**
## Run The Sync
Trigger the sync manually (by clicking the icon on the ribbon sidebar). Something (hopefully) helpful should show up in the console. The console logs are also saved into the DB now.
## Export The Output And Read The Logs
Go to the plugin settings, scroll down to the section "Debug" -> "Export Console Logs From DB", and click the button. A new file `log_hist_exported_on_....md` should be created inside the special folder `_debug_remotely_save/`. You could read it and hopefully find something useful.
## Disable Saving The Output To DB
After debugging, go to the plugin settings, scroll down to the section "Debug" -> "Save Console Logs Into DB", and change it from "enable" to "disable".
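
For context, the workflow above hinged on saving console output into a localforage table (`loggerOutputTbl`, seen elsewhere in this diff), presumably via the log-writer hook in the also-removed `moreOnLog.ts`. The following is only a rough sketch of that wiring: `applyLogWriterInplace` and `loggerOutputTbl` exist in the pre-commit code, while the helper name and key format here are invented for illustration.

```ts
// Rough sketch of the removed "Save Console Logs Into DB" mechanism.
// Only applyLogWriterInplace and loggerOutputTbl come from the original code;
// enableSaveLogsToDb and the timestamp-based key are assumptions.
import { applyLogWriterInplace } from "./moreOnLog";

const enableSaveLogsToDb = (loggerOutputTbl: {
  setItem: (key: string, value: string) => Promise<unknown>;
}) => {
  applyLogWriterInplace((...msg: any[]) => {
    // Every log call is also written to the table, keyed by timestamp.
    loggerOutputTbl.setItem(
      `${Date.now()}`,
      msg.map((m) => String(m)).join(" ")
    );
  });
};
```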

View File

@ -0,0 +1,14 @@
# Use `Logstravaganza`
On iOS, it's quite hard to directly check the console logs.
Luckily, there is a third-party plugin: [`Logstravaganza`](https://obsidian.md/plugins?search=Logstravaganza#), by Carlo Zottmann, that can redirect the output to a note.
You can just:
1. Install it.
2. Enable it.
3. Do something to trigger some console logs.
4. Check out `LOGGING-NOTE (device name).md` in the root of your vault.
See more on its site: <https://github.com/czottmann/obsidian-logstravaganza>.
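
For the curious, a console-to-note redirect of this kind can be built by wrapping the console methods inside an Obsidian plugin. The sketch below is an assumption of how such a plugin might work, not Logstravaganza's actual implementation; the class name and note path are hypothetical.

```ts
// Hedged sketch (NOT Logstravaganza's actual code): mirror console.log output
// into a note. ConsoleToNotePlugin and LOGGING-NOTE.md are hypothetical names.
import { Plugin } from "obsidian";

export default class ConsoleToNotePlugin extends Plugin {
  private originalLog = console.log.bind(console);

  async onload() {
    const notePath = "LOGGING-NOTE.md";
    // Create the target note if it does not exist yet.
    if (!(await this.app.vault.adapter.exists(notePath))) {
      await this.app.vault.adapter.write(notePath, "");
    }
    console.log = (...args: unknown[]) => {
      this.originalLog(...args); // keep the normal console behavior
      const line = args.map((a) => String(a)).join(" ") + "\n";
      this.app.vault.adapter.append(notePath, line).catch(() => {});
    };
  }

  onunload() {
    console.log = this.originalLog; // restore the original console.log
  }
}
```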

View File

@ -3,8 +3,6 @@ import { reverseString } from "./misc";
import type { RemotelySavePluginSettings } from "./baseTypes";
import { log } from "./moreOnLog";
const DEFAULT_README: string =
"The file contains sensitive info, so DO NOT take screenshot of, copy, or share it to anyone! It's also generated automatically, so do not edit it manually.";
@ -19,10 +17,10 @@ interface MessyConfigType {
export const messyConfigToNormal = (
x: MessyConfigType | RemotelySavePluginSettings | null | undefined
): RemotelySavePluginSettings | null | undefined => {
// log.debug("loading, original config on disk:");
// log.debug(x);
// console.debug("loading, original config on disk:");
// console.debug(x);
if (x === null || x === undefined) {
log.debug("the messy config is null or undefined, skip");
console.debug("the messy config is null or undefined, skip");
return x as any;
}
if ("readme" in x && "d" in x) {
@ -35,12 +33,12 @@ export const messyConfigToNormal = (
}) as Buffer
).toString("utf-8")
);
// log.debug("loading, parsed config is:");
// log.debug(y);
// console.debug("loading, parsed config is:");
// console.debug(y);
return y;
} else {
// return as is
// log.debug("loading, parsed config is the same");
// console.debug("loading, parsed config is the same");
return x;
}
};
@ -52,7 +50,7 @@ export const normalConfigToMessy = (
x: RemotelySavePluginSettings | null | undefined
) => {
if (x === null || x === undefined) {
log.debug("the normal config is null or undefined, skip");
console.debug("the normal config is null or undefined, skip");
return x;
}
const y = {
@ -63,7 +61,7 @@ export const normalConfigToMessy = (
})
),
};
// log.debug("encoding, encoded config is:");
// log.debug(y);
// console.debug("encoding, encoded config is:");
// console.debug(y);
return y;
};

View File

@ -11,8 +11,6 @@ import {
FileOrFolderMixedState,
} from "./baseTypes";
import { log } from "./moreOnLog";
const turnSyncPlanToTable = (record: string) => {
const syncPlan: SyncPlanType = JSON.parse(record);
const { ts, tsFmt, remoteType, mixedStates } = syncPlan;
@ -77,7 +75,7 @@ export const exportVaultSyncPlansToFiles = async (
vault: Vault,
vaultRandomID: string
) => {
log.info("exporting");
console.info("exporting");
await mkdirpInVault(DEFAULT_DEBUG_FOLDER, vault);
const records = await readAllSyncPlanRecordTextsByVault(db, vaultRandomID);
let md = "";
@ -93,5 +91,5 @@ export const exportVaultSyncPlansToFiles = async (
await vault.create(filePath, md, {
mtime: ts,
});
log.info("finish exporting");
console.info("finish exporting");
};

View File

@ -1,8 +1,6 @@
import { base32, base64url } from "rfc4648";
import { bufferToArrayBuffer, hexStringToTypedArray } from "./misc";
import { log } from "./moreOnLog";
const DEFAULT_ITER = 20000;
// base32.stringify(Buffer.from('Salted__'))

View File

@ -7,8 +7,6 @@ import {
RemotelySavePluginSettings,
} from "./baseTypes";
import { log } from "./moreOnLog";
export const exportQrCodeUri = async (
settings: RemotelySavePluginSettings,
currentVaultName: string,
@ -22,7 +20,7 @@ export const exportQrCodeUri = async (
const vault = encodeURIComponent(currentVaultName);
const version = encodeURIComponent(pluginVersion);
const rawUri = `obsidian://${COMMAND_URI}?func=settings&version=${version}&vault=${vault}&data=${data}`;
// log.info(uri)
// console.info(uri)
const imgUri = await QRCode.toDataURL(rawUri);
return {
rawUri,

View File

@ -106,10 +106,6 @@
"modal_sizesconflict_desc": "You've set skipping files larger than {{thresholdMB}} MB ({{thresholdBytes}} bytes).\nBut the following files have sizes larger than the threshold on one side, and sizes smaller than the threshold on the other side.\nTo avoid unexpected overwriting or deleting, the plugin stops, and you have to manually deal with at least one side of the files.",
"modal_sizesconflict_copybutton": "Click to copy all the below sizes conflicts info",
"modal_sizesconflict_copynotice": "All the sizes conflicts info have been copied to the clipboard!",
"modal_logtohttpserver_title": "Log To HTTP(S) Server Is DANGEROUS!",
"modal_logtohttpserver_desc": "All your sensitive logging information will be posted to the HTTP(S) server without any authentications!!!!!\nPlease make sure you trust the HTTP(S) server, and it's better to setup a HTTPS one instead of HTTP one.\nIt's for debugging purposes only, especially on mobile.",
"modal_logtohttpserver_secondconfirm": "I know it's dangerous, and insist, and am willing to bear all possible losses.",
"modal_logtohttpserver_notice": "OK.",
"settings_basic": "Basic Settings",
"settings_password": "Encryption Password",
"settings_password_desc": "Password for E2E encryption. Empty for no password. You need to click \"Confirm\". Attention: the password and other info are saved locally.",
@ -264,12 +260,14 @@
"settings_import": "Import",
"settings_import_desc": "You should open a camera or scan-qrcode app, to manually scan the QR code.",
"settings_debug": "Debug",
"settings_debuglevel": "Alter Console Log Level",
"settings_debuglevel_desc": "By default the log level is \"info\". You can change to \"debug\" to get verbose information in console.",
"settings_debuglevel": "Alter Notice Level",
"settings_debuglevel_desc": "By default the notice level is \"info\". You can change to \"debug\" to get verbose information while syncing.",
"settings_outputsettingsconsole": "Output Current Settings From Disk To Console",
"settings_outputsettingsconsole_desc": "The settings save on disk in encoded. Click this to see the decoded settings in console.",
"settings_outputsettingsconsole_button": "Output",
"settings_outputsettingsconsole_notice": "Finished outputing in console.",
"settings_viewconsolelog": "View Console Log",
"settings_viewconsolelog_desc": "On desktop, please press \"ctrl+shift+i\" or \"cmd+shift+i\" to view the log. On mobile, please install the third-party plugin <a href='https://obsidian.md/plugins?search=Logstravaganza'>Logstravaganza</a> to export the console log to a note.",
"settings_syncplans": "Export Sync Plans",
"settings_syncplans_desc": "Sync plans are created every time after you trigger sync and before the actual sync. Useful to know what would actually happen in those sync. Click the button to export sync plans.",
"settings_syncplans_button_json": "Export",
@ -278,9 +276,6 @@
"settings_delsyncplans_desc": "Delete sync plans history in DB.",
"settings_delsyncplans_button": "Delete Sync Plans History",
"settings_delsyncplans_notice": "Sync plans history (in DB) deleted.",
"settings_logtohttpserver": "Log To HTTP(S) Server Temporarily",
"settings_logtohttpserver_desc": "It's very dangerous and please use the function with greate cautions!!!!! It will temporarily allow sending console loggings to HTTP(S) server.",
"settings_logtohttpserver_reset_notice": "Your input doesn't starts with \"http(s)\". Already removed the setting of logging to HTTP(S) server.",
"settings_delprevsync": "Delete Prev Sync Details In DB",
"settings_delprevsync_desc": "The sync algorithm keeps the previous successful sync information in DB to determine the file changes. If you want to ignore them so that all files are treated newly created, you can delete the prev sync info here.",
"settings_delprevsync_button": "Delete Prev Sync Details",

View File

@ -106,10 +106,6 @@
"modal_sizesconflict_desc": "您设置了跳过同步大于 {{thresholdMB}} MB{{thresholdBytes}} bytes的文件。\n但是以下文件的大小在一端大于阈值在另一端则小于阈值。\n为了避免意外的覆盖或删除插件停止了运作您需要手动处理至少一端的文件。",
"modal_sizesconflict_copybutton": "点击以复制以下所有文件大小冲突信息",
"modal_sizesconflict_copynotice": "所有的文件大小冲突信息,已被复制到剪贴板!",
"modal_logtohttpserver_title": "转发终端日志到 HTTP 服务器,此操作很危险!",
"modal_logtohttpserver_desc": "所有您的带敏感信息的终端日志,都会被转发到 HTTP(S) 服务器,没有任何鉴权!!!!!\n请确保您信任对应的服务器最好设置为 HTTPS 而不是 HTTP。\n仅仅用于 debug 用途,例如手机上的 debug。",
"modal_logtohttpserver_secondconfirm": "我知道很危险,坚持要设置,愿意承担所有可能损失。",
"modal_logtohttpserver_notice": "已设置。",
"settings_basic": "基本设置",
"settings_password": "密码",
"settings_password_desc": "端到端加密的密码。不填写则代表没密码。您需要点击“确认”来修改。注意:密码和其它信息都会在本地保存。",
@ -264,12 +260,14 @@
"settings_import": "导入",
"settings_import_desc": "您需要使用系统拍摄 app 或者扫描 QR 码的app来扫描对应的 QR 码。",
"settings_debug": "调试",
"settings_debuglevel": "修改终端输出的 level",
"settings_debuglevel_desc": "默认值为 \"info\"。您可以改为 \"debug\" 从而在终端里获取更多信息。",
"settings_debuglevel": "修改同步提示信息",
"settings_debuglevel_desc": "默认值为 \"info\"。您可以改为 \"debug\" 从而在同步时候里获取更多信息。",
"settings_outputsettingsconsole": "读取硬盘上的设置文件输出到终端",
"settings_outputsettingsconsole_desc": "硬盘上的设置文件是编码过的,点击这里从而解码并输出到终端。",
"settings_outputsettingsconsole_button": "输出",
"settings_outputsettingsconsole_notice": "已输出到终端",
"settings_viewconsolelog": "查看终端输出",
"settings_viewconsolelog_desc": "电脑上输入“ctrl+shift+i”或“cmd+shift+i”来查看终端输出。手机上安装第三方插件 <a href='https://obsidian.md/plugins?search=Logstravaganza'>Logstravaganza</a> 来导出终端输出到一篇笔记上。",
"settings_syncplans": "导出同步计划",
"settings_syncplans_desc": "每次您启动同步,并在实际上传下载前,插件会生成同步计划。它可以使您知道每次同步发生了什么。点击按钮可以导出同步计划。",
"settings_syncplans_button_json": "导出",
@ -278,9 +276,6 @@
"settings_delsyncplans_desc": "删除数据库里的同步计划历史。",
"settings_delsyncplans_button": "删除同步计划历史",
"settings_delsyncplans_notice": "(数据库里的)同步计划已被删除。",
"settings_logtohttpserver": "临时设定终端日志实时转发到 HTTP(S) 服务器。",
"settings_logtohttpserver_desc": "非常危险,谨慎行动!!!!!临时设定终端日志实时转发到 HTTP(S) 服务器。",
"settings_logtohttpserver_reset_notice": "您的输入不是“http(s)”开头的。已移除了终端日志转发到 HTTP(S) 服务器的设定。",
"settings_delprevsync": "删除数据库里的上次同步明细",
"settings_delprevsync_desc": "同步算法需要上次成功同步的信息来决定文件变更,这个信息保存在本地的数据库里。如果您想忽略这些信息从而所有文件都被视为新创建的话,可以在此删除之前的信息。",
"settings_delprevsync_button": "删除上次同步明细",

View File

@ -106,10 +106,6 @@
"modal_sizesconflict_desc": "您設定了跳過同步大於 {{thresholdMB}} MB{{thresholdBytes}} bytes的檔案。\n但是以下檔案的大小在一端大於閾值在另一端則小於閾值。\n為了避免意外的覆蓋或刪除外掛停止了運作您需要手動處理至少一端的檔案。",
"modal_sizesconflict_copybutton": "點選以複製以下所有檔案大小衝突資訊",
"modal_sizesconflict_copynotice": "所有的檔案大小衝突資訊,已被複制到剪貼簿!",
"modal_logtohttpserver_title": "轉發終端日誌到 HTTP 伺服器,此操作很危險!",
"modal_logtohttpserver_desc": "所有您的帶敏感資訊的終端日誌,都會被轉發到 HTTP(S) 伺服器,沒有任何鑑權!!!!!\n請確保您信任對應的伺服器最好設定為 HTTPS 而不是 HTTP。\n僅僅用於 debug 用途,例如手機上的 debug。",
"modal_logtohttpserver_secondconfirm": "我知道很危險,堅持要設定,願意承擔所有可能損失。",
"modal_logtohttpserver_notice": "已設定。",
"settings_basic": "基本設定",
"settings_password": "密碼",
"settings_password_desc": "端到端加密的密碼。不填寫則代表沒密碼。您需要點選“確認”來修改。注意:密碼和其它資訊都會在本地儲存。",
@ -264,12 +260,14 @@
"settings_import": "匯入",
"settings_import_desc": "您需要使用系統拍攝 app 或者掃描 QR 碼的app來掃描對應的 QR 碼。",
"settings_debug": "除錯",
"settings_debuglevel": "修改終端輸出的 level",
"settings_debuglevel_desc": "預設值為 \"info\"。您可以改為 \"debug\" 從而在終端裡獲取更多資訊。",
"settings_debuglevel": "修改同步提示資訊",
"settings_debuglevel_desc": "預設值為 \"info\"。您可以改為 \"debug\" 從而在同步時候裡獲取更多資訊。",
"settings_outputsettingsconsole": "讀取硬碟上的設定檔案輸出到終端",
"settings_outputsettingsconsole_desc": "硬碟上的設定檔案是編碼過的,點選這裡從而解碼並輸出到終端。",
"settings_outputsettingsconsole_button": "輸出",
"settings_outputsettingsconsole_notice": "已輸出到終端",
"settings_viewconsolelog": "檢視終端輸出",
"settings_viewconsolelog_desc": "電腦上輸入“ctrl+shift+i”或“cmd+shift+i”來檢視終端輸出。手機上安裝第三方外掛 <a href='https://obsidian.md/plugins?search=Logstravaganza'>Logstravaganza</a> 來匯出終端輸出到一篇筆記上。",
"settings_syncplans": "匯出同步計劃",
"settings_syncplans_desc": "每次您啟動同步,並在實際上傳下載前,外掛會生成同步計劃。它可以使您知道每次同步發生了什麼。點選按鈕可以匯出同步計劃。",
"settings_syncplans_button_json": "匯出",
@ -278,9 +276,6 @@
"settings_delsyncplans_desc": "刪除資料庫裡的同步計劃歷史。",
"settings_delsyncplans_button": "刪除同步計劃歷史",
"settings_delsyncplans_notice": "(資料庫裡的)同步計劃已被刪除。",
"settings_logtohttpserver": "臨時設定終端日誌實時轉發到 HTTP(S) 伺服器。",
"settings_logtohttpserver_desc": "非常危險,謹慎行動!!!!!臨時設定終端日誌實時轉發到 HTTP(S) 伺服器。",
"settings_logtohttpserver_reset_notice": "您的輸入不是“http(s)”開頭的。已移除了終端日誌轉發到 HTTP(S) 伺服器的設定。",
"settings_delprevsync": "刪除資料庫裡的上次同步明細",
"settings_delprevsync_desc": "同步演算法需要上次成功同步的資訊來決定檔案變更,這個資訊儲存在本地的資料庫裡。如果您想忽略這些資訊從而所有檔案都被視為新建立的話,可以在此刪除之前的資訊。",
"settings_delprevsync_button": "刪除上次同步明細",

View File

@ -8,8 +8,6 @@ import type { Entity, MixedEntity, SUPPORTED_SERVICES_TYPE } from "./baseTypes";
import type { SyncPlanType } from "./sync";
import { statFix, toText, unixTimeToStr } from "./misc";
import { log } from "./moreOnLog";
const DB_VERSION_NUMBER_IN_HISTORY = [20211114, 20220108, 20220326, 20240220];
export const DEFAULT_DB_VERSION_NUMBER: number = 20240220;
export const DEFAULT_DB_NAME = "remotelysavedb";
@ -119,7 +117,7 @@ const migrateDBsFrom20220326To20240220 = async (
) => {
const oldVer = 20220326;
const newVer = 20240220;
log.debug(`start upgrading internal db from ${oldVer} to ${newVer}`);
console.debug(`start upgrading internal db from ${oldVer} to ${newVer}`);
// from sync mapping to prev sync
const syncMappings = await getAllSyncMetaMappingByVault(db, vaultRandomID);
@ -135,7 +133,7 @@ const migrateDBsFrom20220326To20240220 = async (
// await clearAllSyncMetaMappingByVault(db, vaultRandomID);
await db.versionTbl.setItem(`${vaultRandomID}\tversion`, newVer);
log.debug(`finish upgrading internal db from ${oldVer} to ${newVer}`);
console.debug(`finish upgrading internal db from ${oldVer} to ${newVer}`);
};
const migrateDBs = async (
@ -243,7 +241,7 @@ export const prepareDBs = async (
(await db.versionTbl.getItem(`${vaultRandomID}\tversion`)) ??
(await db.versionTbl.getItem("version"));
if (originalVersion === null) {
log.debug(
console.debug(
`no internal db version, setting it to ${DEFAULT_DB_VERSION_NUMBER}`
);
// as of 20240220, we set the version per vault, instead of global "version"
@ -254,7 +252,7 @@ export const prepareDBs = async (
} else if (originalVersion === DEFAULT_DB_VERSION_NUMBER) {
// do nothing
} else {
log.debug(
console.debug(
`trying to upgrade db version from ${originalVersion} to ${DEFAULT_DB_VERSION_NUMBER}`
);
await migrateDBs(
@ -265,7 +263,7 @@ export const prepareDBs = async (
);
}
log.info("db connected");
console.info("db connected");
return {
db: db,
vaultRandomID: vaultRandomID,
@ -276,17 +274,17 @@ export const destroyDBs = async () => {
// await localforage.dropInstance({
// name: DEFAULT_DB_NAME,
// });
// log.info("db deleted");
// console.info("db deleted");
const req = indexedDB.deleteDatabase(DEFAULT_DB_NAME);
req.onsuccess = (event) => {
log.info("db deleted");
console.info("db deleted");
};
req.onblocked = (event) => {
log.warn("trying to delete db but it was blocked");
console.warn("trying to delete db but it was blocked");
};
req.onerror = (event) => {
log.error("tried to delete db but something goes wrong!");
log.error(event);
console.error("tried to delete db but something goes wrong!");
console.error(event);
};
};
@ -420,9 +418,9 @@ export const getAllPrevSyncRecordsByVault = async (
db: InternalDBs,
vaultRandomID: string
) => {
// log.debug('inside getAllPrevSyncRecordsByVault')
// console.debug('inside getAllPrevSyncRecordsByVault')
const keys = await db.prevSyncRecordsTbl.keys();
// log.debug(`inside getAllPrevSyncRecordsByVault, keys=${keys}`)
// console.debug(`inside getAllPrevSyncRecordsByVault, keys=${keys}`)
const res: Entity[] = [];
for (const key of keys) {
if (key.startsWith(`${vaultRandomID}\t`)) {
@ -468,7 +466,7 @@ export const clearAllPrevSyncRecordByVault = async (
export const clearAllLoggerOutputRecords = async (db: InternalDBs) => {
await db.loggerOutputTbl.clear();
log.debug(`successfully clearAllLoggerOutputRecords`);
console.debug(`successfully clearAllLoggerOutputRecords`);
};
export const upsertLastSuccessSyncTimeByVault = async (

View File

@ -64,7 +64,6 @@ import { I18n } from "./i18n";
import type { LangType, LangTypeAndAuto, TransItemType } from "./i18n";
import { SyncAlgoV3Modal } from "./syncAlgoV3Notice";
import { applyLogWriterInplace, log } from "./moreOnLog";
import AggregateError from "aggregate-error";
import { exportVaultSyncPlansToFiles } from "./debugMode";
import { compareVersion } from "./misc";
@ -177,7 +176,7 @@ export default class RemotelySavePlugin extends Plugin {
}
try {
log.info(
console.info(
`${
this.manifest.id
}-${Date.now()}: start sync, triggerSource=${triggerSource}`
@ -206,7 +205,7 @@ export default class RemotelySavePlugin extends Plugin {
if (this.statusBarElement !== undefined) {
this.updateLastSuccessSyncMsg(-1);
}
//log.info(`huh ${this.settings.password}`)
//console.info(`huh ${this.settings.password}`)
if (this.settings.currLogLevel === "info") {
getNotice(
t("syncrun_shortstep1", {
@ -240,8 +239,8 @@ export default class RemotelySavePlugin extends Plugin {
() => self.saveSettings()
);
const remoteEntityList = await client.listAllFromRemote();
log.debug("remoteEntityList:");
log.debug(remoteEntityList);
console.debug("remoteEntityList:");
console.debug(remoteEntityList);
if (this.settings.currLogLevel === "info") {
// pass
@ -270,8 +269,8 @@ export default class RemotelySavePlugin extends Plugin {
this.app.vault.configDir,
this.manifest.id
);
log.debug("localEntityList:");
log.debug(localEntityList);
console.debug("localEntityList:");
console.debug(localEntityList);
if (this.settings.currLogLevel === "info") {
// pass
@ -283,8 +282,8 @@ export default class RemotelySavePlugin extends Plugin {
this.db,
this.vaultRandomID
);
log.debug("prevSyncEntityList:");
log.debug(prevSyncEntityList);
console.debug("prevSyncEntityList:");
console.debug(prevSyncEntityList);
if (this.settings.currLogLevel === "info") {
// pass
@ -308,8 +307,8 @@ export default class RemotelySavePlugin extends Plugin {
this.settings.skipSizeLargerThan ?? -1,
this.settings.conflictAction ?? "keep_newer"
);
log.info(`mixedEntityMappings:`);
log.info(mixedEntityMappings); // for debugging
console.info(`mixedEntityMappings:`);
console.info(mixedEntityMappings); // for debugging
await insertSyncPlanRecordByVault(
this.db,
mixedEntityMappings,
@ -383,7 +382,7 @@ export default class RemotelySavePlugin extends Plugin {
this.updateLastSuccessSyncMsg(lastSuccessSyncMillis);
}
log.info(
console.info(
`${
this.manifest.id
}-${Date.now()}: finish sync, triggerSource=${triggerSource}`
@ -395,8 +394,8 @@ export default class RemotelySavePlugin extends Plugin {
triggerSource: triggerSource,
syncStatus: this.syncStatus,
});
log.error(msg);
log.error(error);
console.error(msg);
console.error(error);
getNotice(msg, 10 * 1000);
if (error instanceof AggregateError) {
for (const e of error.errors) {
@ -414,7 +413,7 @@ export default class RemotelySavePlugin extends Plugin {
}
async onload() {
log.info(`loading plugin ${this.manifest.id}`);
console.info(`loading plugin ${this.manifest.id}`);
const { iconSvgSyncWait, iconSvgSyncRunning, iconSvgLogs } = getIconSvg();
@ -443,10 +442,6 @@ export default class RemotelySavePlugin extends Plugin {
return this.i18n.t(x, vars);
};
if (this.settings.currLogLevel !== undefined) {
log.setLevel(this.settings.currLogLevel as any);
}
await this.checkIfOauthExpires();
// MUST before prepareDB()
@ -474,7 +469,6 @@ export default class RemotelySavePlugin extends Plugin {
}
// must AFTER preparing DB
this.redirectLoggingOuputBasedOnSetting();
this.enableAutoClearOutputToDBHistIfSet();
// must AFTER preparing DB
@ -751,7 +745,7 @@ export default class RemotelySavePlugin extends Plugin {
this.addSettingTab(new RemotelySaveSettingTab(this.app, this));
// this.registerDomEvent(document, "click", (evt: MouseEvent) => {
// log.info("click", evt);
// console.info("click", evt);
// });
if (!this.settings.agreeToUseSyncV3) {
@ -772,7 +766,7 @@ export default class RemotelySavePlugin extends Plugin {
}
async onunload() {
log.info(`unloading plugin ${this.manifest.id}`);
console.info(`unloading plugin ${this.manifest.id}`);
this.syncRibbon = undefined;
if (this.oauth2Info !== undefined) {
this.oauth2Info.helperModal = undefined;
@ -951,7 +945,7 @@ export default class RemotelySavePlugin extends Plugin {
// a real string was assigned before
vaultRandomID = this.settings.vaultRandomID;
}
log.debug("vaultRandomID is no longer saved in data.json");
console.debug("vaultRandomID is no longer saved in data.json");
delete this.settings.vaultRandomID;
await this.saveSettings();
}
@ -1031,7 +1025,7 @@ export default class RemotelySavePlugin extends Plugin {
let needToRunAgain = false;
const scheduleSyncOnSave = (scheduleTimeFromNow: number) => {
log.info(
console.info(
`schedule a run for ${scheduleTimeFromNow} milliseconds later`
);
runScheduled = true;
@ -1195,31 +1189,6 @@ export default class RemotelySavePlugin extends Plugin {
}
}
redirectLoggingOuputBasedOnSetting() {
applyLogWriterInplace((...msg: any[]) => {
if (
this.debugServerTemp !== undefined &&
this.debugServerTemp.trim().startsWith("http")
) {
try {
requestUrl({
url: this.debugServerTemp,
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({
send_time: Date.now(),
log_text: msg,
}),
});
} catch (e) {
// pass
}
}
});
}
enableAutoClearOutputToDBHistIfSet() {
const initClearOutputToDBHistAfterMilliseconds = 1000 * 30;

View File

@ -1,7 +1,6 @@
import isEqual from "lodash/isEqual";
import { base64url } from "rfc4648";
import { reverseString } from "./misc";
import { log } from "./moreOnLog";
const DEFAULT_README_FOR_METADATAONREMOTE =
"Do NOT edit or delete the file manually. This file is for the plugin remotely-save to store some necessary meta data on the remote services. Its content is slightly obfuscated.";

View File

@ -5,8 +5,6 @@ import { base32, base64url } from "rfc4648";
import XRegExp from "xregexp";
import emojiRegex from "emoji-regex";
import { log } from "./moreOnLog";
declare global {
interface Window {
moment: (...data: any) => any;
@ -30,7 +28,7 @@ export const isHiddenPath = (
}
const k = path.posix.normalize(item); // TODO: only unix path now
const k2 = k.split("/"); // TODO: only unix path now
// log.info(k2)
// console.info(k2)
for (const singlePart of k2) {
if (singlePart === "." || singlePart === ".." || singlePart === "") {
continue;
@ -75,14 +73,14 @@ export const getFolderLevels = (x: string, addEndingSlash: boolean = false) => {
};
export const mkdirpInVault = async (thePath: string, vault: Vault) => {
// log.info(thePath);
// console.info(thePath);
const foldersToBuild = getFolderLevels(thePath);
// log.info(foldersToBuild);
// console.info(foldersToBuild);
for (const folder of foldersToBuild) {
const r = await vault.adapter.exists(folder);
// log.info(r);
// console.info(r);
if (!r) {
log.info(`mkdir ${folder}`);
console.info(`mkdir ${folder}`);
await vault.adapter.mkdir(folder);
}
}

View File

@ -1,40 +0,0 @@
// It's very dangerous for this file to depend on other files in the same project.
// We should avoid this situation as much as possible.
import { TAbstractFile, TFolder, TFile, Vault } from "obsidian";
import * as origLog from "loglevel";
import type {
LogLevelNumbers,
Logger,
LogLevel,
LogLevelDesc,
LogLevelNames,
} from "loglevel";
const log2 = origLog.getLogger("rs-default");
const originalFactory = log2.methodFactory;
export const applyLogWriterInplace = function (writer: (...msg: any[]) => any) {
log2.methodFactory = function (
methodName: LogLevelNames,
logLevel: LogLevelNumbers,
loggerName: string | symbol
) {
const rawMethod = originalFactory(methodName, logLevel, loggerName);
return function (...msg: any[]) {
rawMethod.apply(undefined, msg);
writer(...msg);
};
};
log2.setLevel(log2.getLevel());
};
export const restoreLogWritterInplace = () => {
log2.methodFactory = originalFactory;
log2.setLevel(log2.getLevel());
};
export const log = log2;

View File

@ -13,8 +13,6 @@ import * as onedrive from "./remoteForOnedrive";
import * as s3 from "./remoteForS3";
import * as webdav from "./remoteForWebdav";
import { log } from "./moreOnLog";
export class RemoteClient {
readonly serviceType: SUPPORTED_SERVICES_TYPE;
readonly s3Config?: S3Config;

View File

@ -21,8 +21,6 @@ import {
export { Dropbox } from "dropbox";
import { log } from "./moreOnLog";
export const DEFAULT_DROPBOX_CONFIG: DropboxConfig = {
accessToken: "",
clientID: process.env.DEFAULT_DROPBOX_APP_KEY ?? "",
@ -43,7 +41,7 @@ export const getDropboxPath = (
// special
key = `/${remoteBaseDir}`;
} else if (fileOrFolderPath.startsWith("/")) {
log.warn(
console.warn(
`why the path ${fileOrFolderPath} starts with '/'? but we just go on.`
);
key = `/${remoteBaseDir}${fileOrFolderPath}`;
@ -169,7 +167,7 @@ export const sendAuthReq = async (
const resp2 = (await resp1.json()) as DropboxSuccessAuthRes;
return resp2;
} catch (e) {
log.error(e);
console.error(e);
if (errorCallBack !== undefined) {
await errorCallBack(e);
}
@ -181,7 +179,7 @@ export const sendRefreshTokenReq = async (
refreshToken: string
) => {
try {
log.info("start auto getting refreshed Dropbox access token.");
console.info("start auto getting refreshed Dropbox access token.");
const resp1 = await fetch("https://api.dropboxapi.com/oauth2/token", {
method: "POST",
body: new URLSearchParams({
@ -191,10 +189,10 @@ export const sendRefreshTokenReq = async (
}),
});
const resp2 = (await resp1.json()) as DropboxSuccessAuthRes;
log.info("finish auto getting refreshed Dropbox access token.");
console.info("finish auto getting refreshed Dropbox access token.");
return resp2;
} catch (e) {
log.error(e);
console.error(e);
throw e;
}
};
@ -204,7 +202,7 @@ export const setConfigBySuccessfullAuthInplace = async (
authRes: DropboxSuccessAuthRes,
saveUpdatedConfigFunc: () => Promise<any> | undefined
) => {
log.info("start updating local info of Dropbox token");
console.info("start updating local info of Dropbox token");
config.accessToken = authRes.access_token;
config.accessTokenExpiresInSeconds = parseInt(authRes.expires_in);
@ -224,7 +222,7 @@ export const setConfigBySuccessfullAuthInplace = async (
await saveUpdatedConfigFunc();
}
log.info("finish updating local info of Dropbox token");
console.info("finish updating local info of Dropbox token");
};
////////////////////////////////////////////////////////////////////////////////
@ -245,7 +243,7 @@ async function retryReq<T>(
for (let idx = 0; idx < waitSeconds.length; ++idx) {
try {
if (idx !== 0) {
log.warn(
console.warn(
`${extraHint === "" ? "" : extraHint + ": "}The ${
idx + 1
}-th try starts at time ${Date.now()}`
@ -282,7 +280,7 @@ async function retryReq<T>(
const fallbackSec = waitSeconds[idx];
const secMin = Math.max(svrSec, fallbackSec);
const secMax = Math.max(secMin * 1.8, 2);
log.warn(
console.warn(
`${
extraHint === "" ? "" : extraHint + ": "
}We have "429 too many requests" error of ${
@ -355,9 +353,9 @@ export class WrappedDropboxClient {
}
// check vault folder
// log.info(`checking remote has folder /${this.remoteBaseDir}`);
// console.info(`checking remote has folder /${this.remoteBaseDir}`);
if (this.vaultFolderExists) {
// log.info(`already checked, /${this.remoteBaseDir} exist before`)
// console.info(`already checked, /${this.remoteBaseDir} exist before`)
} else {
const res = await this.dropbox.filesListFolder({
path: "",
@ -370,7 +368,7 @@ export class WrappedDropboxClient {
}
}
if (!this.vaultFolderExists) {
log.info(`remote does not have folder /${this.remoteBaseDir}`);
console.info(`remote does not have folder /${this.remoteBaseDir}`);
if (hasEmojiInText(`/${this.remoteBaseDir}`)) {
throw new Error(
@ -381,10 +379,10 @@ export class WrappedDropboxClient {
await this.dropbox.filesCreateFolderV2({
path: `/${this.remoteBaseDir}`,
});
log.info(`remote folder /${this.remoteBaseDir} created`);
console.info(`remote folder /${this.remoteBaseDir} created`);
this.vaultFolderExists = true;
} else {
// log.info(`remote folder /${this.remoteBaseDir} exists`);
// console.info(`remote folder /${this.remoteBaseDir} exists`);
}
}
@ -612,7 +610,7 @@ export const listAllFromRemote = async (client: WrappedDropboxClient) => {
if (res.status !== 200) {
throw Error(JSON.stringify(res));
}
// log.info(res);
// console.info(res);
const contents = res.result.entries;
const unifiedContents = contents
@ -736,8 +734,8 @@ export const deleteFromRemote = async (
fileOrFolderPath
);
} catch (err) {
log.error("some error while deleting");
log.error(err);
console.error("some error while deleting");
console.error(err);
}
};
@ -753,7 +751,7 @@ export const checkConnectivity = async (
}
return true;
} catch (err) {
log.debug(err);
console.debug(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
}

View File

@ -25,8 +25,6 @@ import {
mkdirpInVault,
} from "./misc";
import { log } from "./moreOnLog";
const SCOPES = ["User.Read", "Files.ReadWrite.AppFolder", "offline_access"];
const REDIRECT_URI = `obsidian://${COMMAND_CALLBACK_ONEDRIVE}`;
@ -117,8 +115,8 @@ export const sendAuthReq = async (
// code: authCode,
// codeVerifier: verifier, // PKCE Code Verifier
// });
// log.info('authResponse')
// log.info(authResponse)
// console.info('authResponse')
// console.info(authResponse)
// return authResponse;
// Because of the CORS problem,
@ -143,7 +141,7 @@ export const sendAuthReq = async (
});
const rsp2 = JSON.parse(rsp1);
// log.info(rsp2);
// console.info(rsp2);
if (rsp2.error !== undefined) {
return rsp2 as AccessCodeResponseFailedType;
@ -151,7 +149,7 @@ export const sendAuthReq = async (
return rsp2 as AccessCodeResponseSuccessfulType;
}
} catch (e) {
log.error(e);
console.error(e);
await errorCallBack(e);
}
};
@ -177,7 +175,7 @@ export const sendRefreshTokenReq = async (
});
const rsp2 = JSON.parse(rsp1);
// log.info(rsp2);
// console.info(rsp2);
if (rsp2.error !== undefined) {
return rsp2 as AccessCodeResponseFailedType;
@ -185,7 +183,7 @@ export const sendRefreshTokenReq = async (
return rsp2 as AccessCodeResponseSuccessfulType;
}
} catch (e) {
log.error(e);
console.error(e);
throw e;
}
};
@ -195,7 +193,7 @@ export const setConfigBySuccessfullAuthInplace = async (
authRes: AccessCodeResponseSuccessfulType,
saveUpdatedConfigFunc: () => Promise<any> | undefined
) => {
log.info("start updating local info of OneDrive token");
console.info("start updating local info of OneDrive token");
config.accessToken = authRes.access_token;
config.accessTokenExpiresAtTime =
Date.now() + authRes.expires_in - 5 * 60 * 1000;
@ -210,7 +208,7 @@ export const setConfigBySuccessfullAuthInplace = async (
await saveUpdatedConfigFunc();
}
log.info("finish updating local info of Onedrive token");
console.info("finish updating local info of Onedrive token");
};
////////////////////////////////////////////////////////////////////////////////
@ -231,7 +229,7 @@ const getOnedrivePath = (fileOrFolderPath: string, remoteBaseDir: string) => {
}
if (key.startsWith("/")) {
log.warn(`why the path ${key} starts with '/'? but we just go on.`);
console.warn(`why the path ${key} starts with '/'? but we just go on.`);
key = `${prefix}${key}`;
} else {
key = `${prefix}/${key}`;
@ -403,7 +401,7 @@ class MyAuthProvider implements AuthenticationProvider {
this.onedriveConfig.accessTokenExpiresAtTime =
currentTs + r2.expires_in * 1000 - 60 * 2 * 1000;
await this.saveUpdatedConfigFunc();
log.info("Onedrive accessToken updated");
console.info("Onedrive accessToken updated");
return this.onedriveConfig.accessToken;
}
};
@ -437,26 +435,26 @@ export class WrappedOnedriveClient {
}
// check vault folder
// log.info(`checking remote has folder /${this.remoteBaseDir}`);
// console.info(`checking remote has folder /${this.remoteBaseDir}`);
if (this.vaultFolderExists) {
// log.info(`already checked, /${this.remoteBaseDir} exist before`)
// console.info(`already checked, /${this.remoteBaseDir} exist before`)
} else {
const k = await this.getJson("/drive/special/approot/children");
// log.debug(k);
// console.debug(k);
this.vaultFolderExists =
(k.value as DriveItem[]).filter((x) => x.name === this.remoteBaseDir)
.length > 0;
if (!this.vaultFolderExists) {
log.info(`remote does not have folder /${this.remoteBaseDir}`);
console.info(`remote does not have folder /${this.remoteBaseDir}`);
await this.postJson("/drive/special/approot/children", {
name: `${this.remoteBaseDir}`,
folder: {},
"@microsoft.graph.conflictBehavior": "replace",
});
log.info(`remote folder /${this.remoteBaseDir} created`);
console.info(`remote folder /${this.remoteBaseDir} created`);
this.vaultFolderExists = true;
} else {
// log.info(`remote folder /${this.remoteBaseDir} exists`);
// console.info(`remote folder /${this.remoteBaseDir} exists`);
}
}
};
@ -478,7 +476,7 @@ export class WrappedOnedriveClient {
getJson = async (pathFragOrig: string) => {
const theUrl = this.buildUrl(pathFragOrig);
log.debug(`getJson, theUrl=${theUrl}`);
console.debug(`getJson, theUrl=${theUrl}`);
return JSON.parse(
await request({
url: theUrl,
@ -494,7 +492,7 @@ export class WrappedOnedriveClient {
postJson = async (pathFragOrig: string, payload: any) => {
const theUrl = this.buildUrl(pathFragOrig);
log.debug(`postJson, theUrl=${theUrl}`);
console.debug(`postJson, theUrl=${theUrl}`);
return JSON.parse(
await request({
url: theUrl,
@ -510,7 +508,7 @@ export class WrappedOnedriveClient {
patchJson = async (pathFragOrig: string, payload: any) => {
const theUrl = this.buildUrl(pathFragOrig);
log.debug(`patchJson, theUrl=${theUrl}`);
console.debug(`patchJson, theUrl=${theUrl}`);
return JSON.parse(
await request({
url: theUrl,
@ -526,7 +524,7 @@ export class WrappedOnedriveClient {
deleteJson = async (pathFragOrig: string) => {
const theUrl = this.buildUrl(pathFragOrig);
log.debug(`deleteJson, theUrl=${theUrl}`);
console.debug(`deleteJson, theUrl=${theUrl}`);
if (VALID_REQURL) {
await requestUrl({
url: theUrl,
@ -547,7 +545,7 @@ export class WrappedOnedriveClient {
putArrayBuffer = async (pathFragOrig: string, payload: ArrayBuffer) => {
const theUrl = this.buildUrl(pathFragOrig);
log.debug(`putArrayBuffer, theUrl=${theUrl}`);
console.debug(`putArrayBuffer, theUrl=${theUrl}`);
// TODO:
// 20220401: On Android, requestUrl has issue that text becomes base64.
// Use fetch everywhere instead!
@ -590,7 +588,7 @@ export class WrappedOnedriveClient {
size: number
) => {
const theUrl = this.buildUrl(pathFragOrig);
log.debug(
console.debug(
`putUint8ArrayByRange, theUrl=${theUrl}, range=${rangeStart}-${
rangeEnd - 1
}, len=${rangeEnd - rangeStart}, size=${size}`
@ -655,7 +653,7 @@ export const listAllFromRemote = async (client: WrappedOnedriveClient) => {
`/drive/special/approot:/${client.remoteBaseDir}:/delta`
);
let driveItems = res.value as DriveItem[];
// log.debug(driveItems);
// console.debug(driveItems);
while (NEXT_LINK_KEY in res) {
res = await client.getJson(res[NEXT_LINK_KEY]);
@ -681,14 +679,14 @@ export const getRemoteMeta = async (
remotePath: string
) => {
await client.init();
// log.info(`remotePath=${remotePath}`);
// console.info(`remotePath=${remotePath}`);
const rsp = await client.getJson(
`${remotePath}?$select=cTag,eTag,fileSystemInfo,folder,file,name,parentReference,size`
);
// log.info(rsp);
// console.info(rsp);
const driveItem = rsp as DriveItem;
const res = fromDriveItemToEntity(driveItem, client.remoteBaseDir);
// log.info(res);
// console.info(res);
return res;
};
@ -715,7 +713,7 @@ export const uploadToRemote = async (
uploadFile = remoteEncryptedKey;
}
uploadFile = getOnedrivePath(uploadFile, client.remoteBaseDir);
log.debug(`uploadFile=${uploadFile}`);
console.debug(`uploadFile=${uploadFile}`);
let mtime = 0;
let ctime = 0;
@ -792,7 +790,7 @@ export const uploadToRemote = async (
} as FileSystemInfo,
});
}
// log.info(uploadResult)
// console.info(uploadResult)
const res = await getRemoteMeta(client, uploadFile);
return {
entity: res,
@ -874,8 +872,8 @@ export const uploadToRemote = async (
k
);
const uploadUrl = s.uploadUrl!;
log.debug("uploadSession = ");
log.debug(s);
console.debug("uploadSession = ");
console.debug(s);
// 2. upload by ranges
// convert to uint8
@ -995,7 +993,7 @@ export const checkConnectivity = async (
const k = await getUserDisplayName(client);
return k !== "<unknown display name>";
} catch (err) {
log.debug(err);
console.debug(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
}

View File

@ -42,7 +42,6 @@ import {
export { S3Client } from "@aws-sdk/client-s3";
import { log } from "./moreOnLog";
import PQueue from "p-queue";
////////////////////////////////////////////////////////////////////////////////
@ -227,7 +226,7 @@ const fromS3ObjectToEntity = (
mtimeRecords: Record<string, number>,
ctimeRecords: Record<string, number>
) => {
// log.debug(`fromS3ObjectToEntity: ${x.Key!}, ${JSON.stringify(x,null,2)}`);
// console.debug(`fromS3ObjectToEntity: ${x.Key!}, ${JSON.stringify(x,null,2)}`);
// S3 officially only supports seconds precision!!!!!
const mtimeSvr = Math.floor(x.LastModified!.valueOf() / 1000.0) * 1000;
let mtimeCli = mtimeSvr;
@ -253,7 +252,7 @@ const fromS3HeadObjectToEntity = (
x: HeadObjectCommandOutput,
remotePrefix: string
) => {
// log.debug(`fromS3HeadObjectToEntity: ${fileOrFolderPathWithRemotePrefix}: ${JSON.stringify(x,null,2)}`);
// console.debug(`fromS3HeadObjectToEntity: ${fileOrFolderPathWithRemotePrefix}: ${JSON.stringify(x,null,2)}`);
// S3 officially only supports seconds precision!!!!!
const mtimeSvr = Math.floor(x.LastModified!.valueOf() / 1000.0) * 1000;
let mtimeCli = mtimeSvr;
@ -265,7 +264,7 @@ const fromS3HeadObjectToEntity = (
mtimeCli = m2;
}
}
// log.debug(
// console.debug(
// `fromS3HeadObjectToEntity, fileOrFolderPathWithRemotePrefix=${fileOrFolderPathWithRemotePrefix}, remotePrefix=${remotePrefix}, x=${JSON.stringify(
// x
// )} `
@ -274,7 +273,7 @@ const fromS3HeadObjectToEntity = (
fileOrFolderPathWithRemotePrefix,
remotePrefix
);
// log.debug(`fromS3HeadObjectToEntity, key=${key} after removing prefix`);
// console.debug(`fromS3HeadObjectToEntity, key=${key} after removing prefix`);
return {
keyRaw: key,
mtimeSvr: mtimeSvr,
@ -367,7 +366,7 @@ export const uploadToRemote = async (
rawContentMTime: number = 0,
rawContentCTime: number = 0
): Promise<UploadedType> => {
log.debug(`uploading ${fileOrFolderPath}`);
console.debug(`uploading ${fileOrFolderPath}`);
let uploadFile = fileOrFolderPath;
if (password !== "") {
if (remoteEncryptedKey === undefined || remoteEncryptedKey === "") {
@ -378,7 +377,7 @@ export const uploadToRemote = async (
uploadFile = remoteEncryptedKey;
}
uploadFile = getRemoteWithPrefixPath(uploadFile, s3Config.remotePrefix ?? "");
// log.debug(`actual uploadFile=${uploadFile}`);
// console.debug(`actual uploadFile=${uploadFile}`);
const isFolder = fileOrFolderPath.endsWith("/");
if (isFolder && isRecursively) {
@ -472,12 +471,12 @@ export const uploadToRemote = async (
},
});
upload.on("httpUploadProgress", (progress) => {
// log.info(progress);
// console.info(progress);
});
await upload.done();
const res = await getRemoteMeta(s3Client, s3Config, uploadFile);
// log.debug(
// console.debug(
// `uploaded ${uploadFile} with res=${JSON.stringify(res, null, 2)}`
// );
return {
@ -772,7 +771,7 @@ export const checkConnectivity = async (
results.$metadata.httpStatusCode === undefined
) {
const err = "results or $metadata or httStatusCode is undefined";
log.debug(err);
console.debug(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
}
@ -780,7 +779,7 @@ export const checkConnectivity = async (
}
return results.$metadata.httpStatusCode === 200;
} catch (err: any) {
log.debug(err);
console.debug(err);
if (callbackFunc !== undefined) {
if (s3Config.s3Endpoint.contains(s3Config.s3BucketName)) {
const err2 = new AggregateError([

View File

@ -9,8 +9,6 @@ import { Entity, UploadedType, VALID_REQURL, WebdavConfig } from "./baseTypes";
import { decryptArrayBuffer, encryptArrayBuffer } from "./encrypt";
import { bufferToArrayBuffer, getPathFolder, mkdirpInVault } from "./misc";
import { log } from "./moreOnLog";
import type {
FileStat,
WebDAVClient,
@ -85,9 +83,9 @@ if (VALID_REQURL) {
}
}
}
// log.info(`requesting url=${options.url}`);
// log.info(`contentType=${contentType}`);
// log.info(`rspHeaders=${JSON.stringify(rspHeaders)}`)
// console.info(`requesting url=${options.url}`);
// console.info(`contentType=${contentType}`);
// console.info(`rspHeaders=${JSON.stringify(rspHeaders)}`)
// let r2: Response = undefined;
// if (contentType.includes("xml")) {
@ -100,9 +98,9 @@ if (VALID_REQURL) {
// contentType.includes("json") ||
// contentType.includes("javascript")
// ) {
// log.info('inside json branch');
// console.info('inside json branch');
// // const j = r.json;
// // log.info(j);
// // console.info(j);
// r2 = new Response(
// r.text, // yea, here is the text because Response constructor expects a text
// {
@ -178,7 +176,7 @@ const getWebdavPath = (fileOrFolderPath: string, remoteBaseDir: string) => {
// special
key = `/${remoteBaseDir}/`;
} else if (fileOrFolderPath.startsWith("/")) {
log.warn(
console.warn(
`why the path ${fileOrFolderPath} starts with '/'? but we just go on.`
);
key = `/${remoteBaseDir}${fileOrFolderPath}`;
@ -259,7 +257,7 @@ export class WrappedWebdavClient {
: AuthType.Password,
});
} else {
log.info("no password");
console.info("no password");
this.client = createClient(this.webdavConfig.address, {
headers: headers,
});
@ -271,12 +269,12 @@ export class WrappedWebdavClient {
} else {
const res = await this.client.exists(`/${this.remoteBaseDir}/`);
if (res) {
// log.info("remote vault folder exits!");
// console.info("remote vault folder exits!");
this.vaultFolderExists = true;
} else {
log.info("remote vault folder not exists, creating");
console.info("remote vault folder not exists, creating");
await this.client.createDirectory(`/${this.remoteBaseDir}/`);
log.info("remote vault folder created!");
console.info("remote vault folder created!");
this.vaultFolderExists = true;
}
}
@ -292,7 +290,7 @@ export class WrappedWebdavClient {
this.webdavConfig.manualRecursive = true;
if (this.saveUpdatedConfigFunc !== undefined) {
await this.saveUpdatedConfigFunc();
log.info(
console.info(
`webdav depth="auto_???" is changed to ${this.webdavConfig.depth}`
);
}
@ -323,11 +321,11 @@ export const getRemoteMeta = async (
remotePath: string
) => {
await client.init();
log.debug(`getRemoteMeta remotePath = ${remotePath}`);
console.debug(`getRemoteMeta remotePath = ${remotePath}`);
const res = (await client.client.stat(remotePath, {
details: false,
})) as FileStat;
log.debug(`getRemoteMeta res=${JSON.stringify(res)}`);
console.debug(`getRemoteMeta res=${JSON.stringify(res)}`);
return fromWebdavItemToEntity(res, client.remoteBaseDir);
};
@ -376,7 +374,7 @@ export const uploadToRemote = async (
await client.client.putFileContents(uploadFile, "", {
overwrite: true,
onUploadProgress: (progress: any) => {
// log.info(`Uploaded ${progress.loaded} bytes of ${progress.total}`);
// console.info(`Uploaded ${progress.loaded} bytes of ${progress.total}`);
},
});
@ -417,7 +415,7 @@ export const uploadToRemote = async (
await client.client.putFileContents(uploadFile, remoteContent, {
overwrite: true,
onUploadProgress: (progress: any) => {
log.info(`Uploaded ${progress.loaded} bytes of ${progress.total}`);
console.info(`Uploaded ${progress.loaded} bytes of ${progress.total}`);
},
});
@ -449,7 +447,7 @@ export const listAllFromRemote = async (client: WrappedWebdavClient) => {
itemsToFetch.push(q.pop()!);
}
const itemsToFetchChunks = chunk(itemsToFetch, CHUNK_SIZE);
// log.debug(itemsToFetchChunks);
// console.debug(itemsToFetchChunks);
const subContents = [] as FileStat[];
for (const singleChunk of itemsToFetchChunks) {
const r = singleChunk.map((x) => {
@ -495,7 +493,7 @@ const downloadFromRemoteRaw = async (
remotePath: string
) => {
await client.init();
// log.info(`getWebdavPath=${remotePath}`);
// console.info(`getWebdavPath=${remotePath}`);
const buff = (await client.client.getFileContents(remotePath)) as BufferLike;
if (buff instanceof ArrayBuffer) {
return buff;
@ -535,7 +533,7 @@ export const downloadFromRemote = async (
downloadFile = remoteEncryptedKey;
}
downloadFile = getWebdavPath(downloadFile, client.remoteBaseDir);
// log.info(`downloadFile=${downloadFile}`);
// console.info(`downloadFile=${downloadFile}`);
const remoteContent = await downloadFromRemoteRaw(client, downloadFile);
let localContent = remoteContent;
if (password !== "") {
@ -568,10 +566,10 @@ export const deleteFromRemote = async (
await client.init();
try {
await client.client.deleteFile(remoteFileName);
// log.info(`delete ${remoteFileName} succeeded`);
// console.info(`delete ${remoteFileName} succeeded`);
} catch (err) {
log.error("some error while deleting");
log.error(err);
console.error("some error while deleting");
console.error(err);
}
};
@ -586,7 +584,7 @@ export const checkConnectivity = async (
)
) {
const err = "Error: the url should start with http(s):// but it does not!";
log.error(err);
console.error(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
}
@ -597,7 +595,7 @@ export const checkConnectivity = async (
const results = await getRemoteMeta(client, `/${client.remoteBaseDir}/`);
if (results === undefined) {
const err = "results is undefined";
log.error(err);
console.error(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
}
@ -605,7 +603,7 @@ export const checkConnectivity = async (
}
return true;
} catch (err) {
log.error(err);
console.error(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
}

View File

@ -44,13 +44,7 @@ import {
} from "./remoteForOnedrive";
import { messyConfigToNormal } from "./configPersist";
import type { TransItemType } from "./i18n";
import { checkHasSpecialCharForDir } from "./misc";
import {
applyLogWriterInplace,
log,
restoreLogWritterInplace,
} from "./moreOnLog";
import { checkHasSpecialCharForDir, stringToFragment } from "./misc";
import { simpleTransRemotePrefix } from "./remoteForS3";
class PasswordModal extends Modal {
@ -452,7 +446,7 @@ class DropboxAuthModal extends Modal {
);
this.close();
} catch (err) {
log.error(err);
console.error(err);
new Notice(t("modal_dropboxauth_maualinput_conn_fail"));
}
});
@ -588,7 +582,7 @@ export class OnedriveRevokeAuthModal extends Modal {
new Notice(t("modal_onedriverevokeauth_clean_notice"));
this.close();
} catch (err) {
log.error(err);
console.error(err);
new Notice(t("modal_onedriverevokeauth_clean_fail"));
}
});
@ -715,65 +709,6 @@ class ExportSettingsQrCodeModal extends Modal {
}
}
class SetLogToHttpServerModal extends Modal {
plugin: RemotelySavePlugin;
serverAddr: string;
callBack: any;
constructor(
app: App,
plugin: RemotelySavePlugin,
serverAddr: string,
callBack: any
) {
super(app);
this.plugin = plugin;
this.serverAddr = serverAddr;
this.callBack = callBack;
}
onOpen() {
let { contentEl } = this;
const t = (x: TransItemType, vars?: any) => {
return this.plugin.i18n.t(x, vars);
};
contentEl.createEl("h2", { text: t("modal_logtohttpserver_title") });
const div1 = contentEl.createDiv();
div1.addClass("logtohttpserver-warning");
t("modal_logtohttpserver_desc")
.split("\n")
.forEach((val) => {
div1.createEl("p", {
text: val,
});
});
new Setting(contentEl)
.addButton((button) => {
button.setButtonText(t("modal_logtohttpserver_secondconfirm"));
button.setClass("logtohttpserver-warning");
button.onClick(async () => {
this.callBack();
new Notice(t("modal_logtohttpserver_notice"));
this.close();
});
})
.addButton((button) => {
button.setButtonText(t("goback"));
button.onClick(() => {
this.close();
});
});
}
onClose() {
let { contentEl } = this;
contentEl.empty();
}
}
const getEyesElements = () => {
const eyeEl = createElement(Eye);
const eyeOffEl = createElement(EyeOff);
@ -1153,7 +1088,7 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
);
new Notice(t("settings_dropbox_revoke_notice"));
} catch (err) {
log.error(err);
console.error(err);
new Notice(t("settings_dropbox_revoke_noticeerr"));
}
});
@ -1723,7 +1658,7 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
realVal > 0
) {
const intervalID = window.setInterval(() => {
log.info("auto run from settings.ts");
console.info("auto run from settings.ts");
this.plugin.syncRun("auto");
}, realVal);
this.plugin.autoRunIntervalID = intervalID;
@ -1795,7 +1730,7 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
// then schedule a run for syncOnSaveAfterMilliseconds after it was modified
const lastModified = currentFile.stat.mtime;
const currentTime = Date.now();
// log.debug(
// console.debug(
// `Checking if file was modified within last ${
// this.plugin.settings.syncOnSaveAfterMilliseconds / 1000
// } seconds, last modified: ${
@ -1810,7 +1745,7 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
const scheduleTimeFromNow =
this.plugin.settings.syncOnSaveAfterMilliseconds! -
(currentTime - lastModified);
log.info(
console.info(
`schedule a run for ${scheduleTimeFromNow} milliseconds later`
);
runScheduled = true;
@ -2074,9 +2009,8 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
.setValue(this.plugin.settings.currLogLevel ?? "info")
.onChange(async (val: string) => {
this.plugin.settings.currLogLevel = val;
log.setLevel(val as any);
await this.plugin.saveSettings();
log.info(`the log level is changed to ${val}`);
console.info(`the log level is changed to ${val}`);
});
});
@ -2087,11 +2021,15 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
button.setButtonText(t("settings_outputsettingsconsole_button"));
button.onClick(async () => {
const c = messyConfigToNormal(await this.plugin.loadData());
log.info(c);
console.info(c);
new Notice(t("settings_outputsettingsconsole_notice"));
});
});
new Setting(debugDiv)
.setName(t("settings_viewconsolelog"))
.setDesc(stringToFragment(t("settings_viewconsolelog_desc")));
new Setting(debugDiv)
.setName(t("settings_syncplans"))
.setDesc(t("settings_syncplans_desc"))
@ -2118,53 +2056,6 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
});
});
let logToHttpServer = this.plugin.debugServerTemp || "";
new Setting(debugDiv)
.setName(t("settings_logtohttpserver"))
.setDesc(t("settings_logtohttpserver_desc"))
.addText(async (text) => {
text.setValue(logToHttpServer).onChange(async (value) => {
logToHttpServer = value.trim();
});
})
.addButton(async (button) => {
button.setButtonText(t("confirm"));
button.onClick(async () => {
if (logToHttpServer === "" || !logToHttpServer.startsWith("http")) {
this.plugin.debugServerTemp = "";
logToHttpServer = "";
// restoreLogWritterInplace();
new Notice(t("settings_logtohttpserver_reset_notice"));
} else {
new SetLogToHttpServerModal(
this.app,
this.plugin,
logToHttpServer,
() => {
this.plugin.debugServerTemp = logToHttpServer;
// applyLogWriterInplace((...msg: any[]) => {
// try {
// requestUrl({
// url: logToHttpServer,
// method: "POST",
// headers: {
// "Content-Type": "application/json",
// },
// body: JSON.stringify({
// send_time: Date.now(),
// log_text: msg,
// }),
// });
// } catch (e) {
// // pass
// }
// });
}
).open();
}
});
});
new Setting(debugDiv)
.setName(t("settings_delprevsync"))
.setDesc(t("settings_delprevsync_desc"))

View File

@ -31,7 +31,6 @@ import {
import { RemoteClient } from "./remote";
import { Vault } from "obsidian";
import { log } from "./moreOnLog";
import AggregateError from "aggregate-error";
import {
InternalDBs,
@ -302,7 +301,7 @@ const encryptLocalEntityInplace = async (
password: string,
remoteKeyEnc: string | undefined
) => {
// log.debug(
// console.debug(
// `encryptLocalEntityInplace: local=${JSON.stringify(
// local,
// null,
@ -500,14 +499,14 @@ export const getSyncPlanInplace = async (
const mixedEntry = mixedEntityMappings[key];
const { local, prevSync, remote } = mixedEntry;
// log.debug(`getSyncPlanInplace: key=${key}`)
// console.debug(`getSyncPlanInplace: key=${key}`)
if (key.endsWith("/")) {
// folder
// folder doesn't worry about mtime and size, only check their existences
if (keptFolder.has(key)) {
// parent should also be kept
// log.debug(`${key} in keptFolder`)
// console.debug(`${key} in keptFolder`)
keptFolder.add(getParentFolder(key));
// should fill the missing part
if (local !== undefined && remote !== undefined) {
@ -806,9 +805,9 @@ const splitThreeStepsOnEntityMappings = (
val.decision === "folder_existed_remote" ||
val.decision === "folder_to_be_created"
) {
// log.debug(`splitting folder: key=${key},val=${JSON.stringify(val)}`);
// console.debug(`splitting folder: key=${key},val=${JSON.stringify(val)}`);
const level = atWhichLevel(key);
// log.debug(`atWhichLevel: ${level}`);
// console.debug(`atWhichLevel: ${level}`);
const k = folderCreationOps[level - 1];
if (k === undefined || k === null) {
folderCreationOps[level - 1] = [val];
@ -880,7 +879,7 @@ const dispatchOperationToActualV3 = async (
localDeleteFunc: any,
password: string
) => {
// log.debug(
// console.debug(
// `inside dispatchOperationToActualV3, key=${key}, r=${JSON.stringify(
// r,
// null,
@ -912,7 +911,7 @@ const dispatchOperationToActualV3 = async (
// special treatment for OneDrive: do nothing, skip empty file without encryption
// if it's empty folder, or it's encrypted file/folder, it continues to be uploaded.
} else {
// log.debug(`before upload in sync, r=${JSON.stringify(r, null, 2)}`);
// console.debug(`before upload in sync, r=${JSON.stringify(r, null, 2)}`);
const { entity, mtimeCli } = await client.uploadToRemote(
r.key,
vault,
@ -986,13 +985,13 @@ export const doActualSync = async (
callbackSyncProcess: any,
db: InternalDBs
) => {
log.debug(`concurrency === ${concurrency}`);
console.debug(`concurrency === ${concurrency}`);
const { folderCreationOps, deletionOps, uploadDownloads, realTotalCount } =
splitThreeStepsOnEntityMappings(mixedEntityMappings);
// log.debug(`folderCreationOps: ${JSON.stringify(folderCreationOps)}`);
// log.debug(`deletionOps: ${JSON.stringify(deletionOps)}`);
// log.debug(`uploadDownloads: ${JSON.stringify(uploadDownloads)}`);
// log.debug(`realTotalCount: ${JSON.stringify(realTotalCount)}`);
// console.debug(`folderCreationOps: ${JSON.stringify(folderCreationOps)}`);
// console.debug(`deletionOps: ${JSON.stringify(deletionOps)}`);
// console.debug(`uploadDownloads: ${JSON.stringify(uploadDownloads)}`);
// console.debug(`realTotalCount: ${JSON.stringify(realTotalCount)}`);
const nested = [folderCreationOps, deletionOps, uploadDownloads];
const logTexts = [
@ -1003,14 +1002,16 @@ export const doActualSync = async (
let realCounter = 0;
for (let i = 0; i < nested.length; ++i) {
log.debug(logTexts[i]);
console.debug(logTexts[i]);
const operations = nested[i];
// log.debug(`curr operations=${JSON.stringify(operations, null, 2)}`);
// console.debug(`curr operations=${JSON.stringify(operations, null, 2)}`);
for (let j = 0; j < operations.length; ++j) {
const singleLevelOps = operations[j];
log.debug(`singleLevelOps=${JSON.stringify(singleLevelOps, null, 2)}`);
console.debug(
`singleLevelOps=${JSON.stringify(singleLevelOps, null, 2)}`
);
if (singleLevelOps === undefined || singleLevelOps === null) {
continue;
}
@ -1024,7 +1025,9 @@ export const doActualSync = async (
const key = val.key;
const fn = async () => {
log.debug(`start syncing "${key}" with plan ${JSON.stringify(val)}`);
console.debug(
`start syncing "${key}" with plan ${JSON.stringify(val)}`
);
if (callbackSyncProcess !== undefined) {
await callbackSyncProcess(
@ -1048,7 +1051,7 @@ export const doActualSync = async (
password
);
log.debug(`finished ${key}`);
console.debug(`finished ${key}`);
};
queue.add(fn).catch((e) => {

View File

@ -2,7 +2,6 @@ import { App, Modal, Notice, PluginSettingTab, Setting } from "obsidian";
import type RemotelySavePlugin from "./main"; // unavoidable
import type { TransItemType } from "./i18n";
import { log } from "./moreOnLog";
import { stringToFragment } from "./misc";
export class SyncAlgoV3Modal extends Modal {
@ -88,13 +87,13 @@ export class SyncAlgoV3Modal extends Modal {
let { contentEl } = this;
contentEl.empty();
if (this.agree) {
log.info("agree to use the new algorithm");
console.info("agree to use the new algorithm");
this.plugin.saveAgreeToUseNewSyncAlgorithm();
this.plugin.enableAutoSyncIfSet();
this.plugin.enableInitSyncIfSet();
this.plugin.enableSyncOnSaveIfSet();
} else {
log.info("do not agree to use the new algorithm");
console.info("do not agree to use the new algorithm");
this.plugin.unload();
}
}