Compare commits

...

82 Commits

Author SHA1 Message Date
fyears 1e4d729eb7 make the wording explict 2024-06-02 23:50:54 +08:00
fyears 64371b7d65 bump to 0.5.3 2024-06-02 23:38:44 +08:00
fyears e116bb1deb google drive is usable now 2024-06-02 23:37:53 +08:00
fyears 2ace90155c fix build and bump to 0.5.2 2024-05-27 00:53:07 +08:00
fyears d885a4c743 remove useless code 2024-05-27 00:36:52 +08:00
fyears b6d46d8b91 remove https detection 2024-05-27 00:35:55 +08:00
fyears 06dad54d4c pro and smart conflict 2024-05-27 00:33:49 +08:00
fyears 0802767726 bump to 0.4.25 2024-05-25 15:58:52 +08:00
fyears be4a2d3271 nextcloud address 2024-05-25 15:57:48 +08:00
fyears 7ca2d19255 more debug info 2024-05-25 15:53:38 +08:00
fyears ff765d5ae7 change back to local time 2024-05-25 15:53:27 +08:00
fyears bdbf0b1484 nextcloud is good 2024-05-25 15:39:29 +08:00
fyears 7497b5fae7 split export 2024-05-25 15:22:19 +08:00
fyears 408acb6230 add custom protection 2024-05-25 14:37:37 +08:00
fyears d9cab7b1ff fix kind 2024-05-24 23:13:40 +08:00
fyears 5e53967e01 bump to 0.4.24 2024-05-24 23:11:37 +08:00
fyears fb9f4a67b4 fix size bug 2024-05-24 23:11:01 +08:00
fyears 26a426dda8 bump to 0.4.23 2024-05-24 22:42:21 +08:00
fyears de64c3c53f fix condition for partial update 2024-05-24 22:41:43 +08:00
fyears 0cefafa491 special treatment for jianguoyun 2024-05-24 22:08:02 +08:00
fyears b769becb97 bump to 0.4.22 2024-05-21 01:04:22 +08:00
fyears 7b3600a46f correct way for nextcloud 2024-05-20 09:56:09 +08:00
fyears 69e72eae1d correctly set range update 2024-05-19 21:41:01 +08:00
fyears b0acde0ba6 remove recursive to fix digest 2024-05-19 20:36:34 +08:00
fyears 11b7fee80b remove unnecessary get 2024-05-19 19:59:16 +08:00
fyears c4c39f6b79 make it more robust 2024-05-19 19:13:56 +08:00
fyears 45578a01dd chunk webdav upload to 5 mb 2024-05-19 18:10:32 +08:00
fyears 0391c42999 wrap settings 2024-05-19 18:09:41 +08:00
fyears 807eec928e optimize sync plan export 2024-05-19 17:51:44 +08:00
fyears 3d7c4d2a4a optimize webdav 2024-05-19 17:03:14 +08:00
fyears cb779fc7bf add upload by chunks for webdav 2024-05-19 15:58:49 +08:00
fyears 9d8e2af7b9 allow skipping empty file in onedrive 2024-05-18 12:03:53 +08:00
Jason a48440e60b
Refine wording and grammar in some descriptions of settings. (#655)
* Refine wording and grammar in some descriptions of settings.

* Remove extra space in settings_encryptionmethod_desc.

* Add required ending comma syntax to settings_webdav_auth_desc.
2024-05-18 02:32:07 +08:00
fyears 2a84fae368 fix typo 2024-05-18 02:31:28 +08:00
fyears 3c3426a842 add more size range 2024-05-18 02:30:50 +08:00
fyears 74de7da89a fix timestamp to date using native js 2024-05-18 01:41:54 +08:00
fyears 1f33ac5d7a remove verbose webdav output 2024-05-18 01:34:46 +08:00
fyears 63c54d1956 check password using walkPartial instead of cache 2024-05-18 01:33:20 +08:00
fyears b584f89a95 profiler itself might impact performance 2024-05-18 00:03:21 +08:00
fyears fa17ea074b format 2024-05-17 22:59:44 +08:00
fyears cb98bae79a buck? localforage 2024-05-17 22:59:34 +08:00
fyears d1e30e3536 bump to 0.4.21 2024-05-09 00:02:07 +08:00
fyears 36079fc1d0 add profiler 2024-05-09 00:01:30 +08:00
fyears 67467a5034 fail statusbar 2024-05-08 22:50:18 +08:00
fyears 2a3df8ab53 fix onedrive issue in enc 2024-05-08 22:37:34 +08:00
fyears e66b0c71c4 fix format again 2024-05-08 22:04:21 +08:00
fyears a081d09212 safe lint from biome 2024-05-08 00:20:15 +08:00
fyears 6ed6122bb6 rm prettier 2024-05-07 23:51:26 +08:00
fyears 235e346d2f slightly format 2024-05-07 23:48:37 +08:00
fyears 084cbc8391 switch formater to biome for speed 2024-05-07 23:48:29 +08:00
fyears dc0c1db779 enable git lfs cache to avoid billing according to https://github.com/actions/checkout/issues/165 2024-05-07 23:22:51 +08:00
fyears 2645ff34e6 bump to 0.4.20 2024-05-07 00:05:41 +08:00
fyears f25a2c2992 shorten text 2024-05-07 00:02:34 +08:00
fyears 3d1269a9f2 add a little helper to see file stat 2024-05-07 00:01:21 +08:00
fyears ed52a8542f fix s3 mtime problem 2024-05-06 23:41:48 +08:00
fyears 757eb5c801 format 2024-04-30 00:57:22 +08:00
fyears 048e7b6251
update readme of s3 (#639) 2024-04-30 00:46:15 +08:00
fyears b762052da3 customize the welcome text 2024-04-30 00:37:52 +08:00
fyears c61efd1367 add steps for cla 2024-04-30 00:13:27 +08:00
fyears 605bffa471
add webdis (#638) 2024-04-30 00:04:00 +08:00
fyears 895e3db4c6 cla bot 2024-04-29 23:51:12 +08:00
fyears ce1990a35f change loglevel for verbose mixedEntityMappings 2024-04-28 00:55:56 +08:00
fyears 9ea7c8e858 bump to 0.4.19 2024-04-28 00:31:14 +08:00
fyears 61a3fab219 webdis 2024-04-27 23:10:36 +08:00
fyears 5340e38eac format of doc 2024-04-27 17:11:59 +08:00
fyears 8d5868b8d8 fix process counting 2024-04-27 17:11:48 +08:00
fyears 55175a6d06 bump to 0.4.18 2024-04-27 12:34:10 +08:00
fyears f60bd25490 add enc for upyun 2024-04-27 12:21:31 +08:00
fyears 1d8463a3ed add upyun 2024-04-27 12:13:09 +08:00
fyears 9fe1b1d5e6 clean both folder if empty 2024-04-27 12:03:36 +08:00
fyears df7b6e1848 allowing s3 synth folder 2024-04-27 12:01:30 +08:00
fyears a9126e5947 fix and optimize tests 2024-04-27 03:28:39 +08:00
fyears 3bb7355db3 update package 2024-04-27 02:35:38 +08:00
fyears f33fa26c03 a large semi-rewrite of fs logic 2024-04-27 02:27:24 +08:00
Kira Kawai 5ce350ba41
💄 setings: overflow-wrap: break-word (#597)
Co-authored-by: ras0q <ras0q@users.noreply.github.com>
2024-04-26 23:23:34 +08:00
lyiton 1d32f1242d
Update zh_cn.json (#598) 2024-04-19 23:33:03 +08:00
fyears d8ab054da1 change css on mobile to make multiple buttons happy 2024-04-05 11:53:44 +08:00
fyears b877228415 revert back the webdav path func 2024-04-05 11:37:03 +08:00
fyears 28b99557a8 clean up reverse proxy 2024-04-05 11:06:16 +08:00
fyears ae28cf9183 format 2024-04-05 10:48:53 +08:00
Yesterday17 8f68ac4ded
handle relative path correctly (#226)
Co-authored-by: fyears <1142836+fyears@users.noreply.github.com>
2024-04-05 10:45:23 +08:00
Adens Wang 220fd07a8b
feat: add reserve proxy url (#479)
Co-authored-by: Adens <dwang@senparc.com>
Co-authored-by: fyears <1142836+fyears@users.noreply.github.com>
2024-04-05 10:36:58 +08:00
91 changed files with 9479 additions and 4532 deletions


@@ -1,3 +1,7 @@
DROPBOX_APP_KEY=
ONEDRIVE_CLIENT_ID=
ONEDRIVE_AUTHORITY=https://
REMOTELYSAVE_WEBSITE=http://127.0.0.1:46683
REMOTELYSAVE_CLIENT_ID=cli-xxx
GOOGLEDRIVE_CLIENT_ID=xxx.apps.googleusercontent.com
GOOGLEDRIVE_CLIENT_SECRET=GOCSPX-sss


@@ -19,6 +19,10 @@ jobs:
DROPBOX_APP_KEY: ${{secrets.DROPBOX_APP_KEY}}
ONEDRIVE_CLIENT_ID: ${{secrets.ONEDRIVE_CLIENT_ID}}
ONEDRIVE_AUTHORITY: ${{secrets.ONEDRIVE_AUTHORITY}}
REMOTELYSAVE_WEBSITE: ${{secrets.REMOTELYSAVE_WEBSITE}}
REMOTELYSAVE_CLIENT_ID: ${{secrets.REMOTELYSAVE_CLIENT_ID}}
GOOGLEDRIVE_CLIENT_ID: ${{secrets.GOOGLEDRIVE_CLIENT_ID}}
GOOGLEDRIVE_CLIENT_SECRET: ${{secrets.GOOGLEDRIVE_CLIENT_SECRET}}
strategy:
matrix:
@@ -29,10 +33,18 @@ jobs:
- name: Checkout codes
uses: actions/checkout@v2
with:
lfs: true
submodules: recursive
- name: Checkout LFS
run: git lfs checkout
- name: Checkout LFS file list
run: git lfs ls-files --long | cut -d ' ' -f1 | sort > .lfs-assets-id
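# The sorted LFS object-id list above acts as the cache key: when the set of
# LFS-tracked files changes, hashFiles('.lfs-assets-id') changes and a fresh
# cache entry is created; otherwise the cached .git/lfs/objects is restored.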
- name: LFS Cache
uses: actions/cache@v3
with:
path: .git/lfs/objects
key: ${{ runner.os }}-lfs-${{ hashFiles('.lfs-assets-id') }}
restore-keys: |
${{ runner.os }}-lfs-
- name: Git LFS Pull
run: git lfs pull
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v2
with:

43
.github/workflows/cla.yml vendored Normal file

@@ -0,0 +1,43 @@
name: "CLA Assistant"
on:
issue_comment:
types: [created]
pull_request_target:
types: [opened,closed,synchronize]
# explicitly configure permissions, in case your GITHUB_TOKEN workflow permissions are set to read-only in repository settings
permissions:
actions: write
contents: write
pull-requests: write
statuses: write
jobs:
CLAAssistant:
runs-on: ubuntu-latest
steps:
- name: "CLA Assistant"
if: (github.event.comment.body == 'recheck' || github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA') || github.event_name == 'pull_request_target'
uses: contributor-assistant/github-action@v2.3.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# the below token should have repo scope and must be manually added by you in the repository's secret
# This token is required only if you have configured to store the signatures in a remote repository/organization
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
with:
path-to-signatures: 'signatures/version1/cla.json'
path-to-document: 'https://github.com/remotely-save/remotely-save/blob/master/CLA.md' # e.g. a CLA or a DCO document
# branch should not be protected
branch: 'main'
allowlist: bot*
# the followings are the optional inputs - If the optional inputs are not given, then default values will be taken
remote-organization-name: remotely-save
remote-repository-name: cla-signed
#create-file-commit-message: 'For example: Creating file for storing CLA Signatures'
#signed-commit-message: 'For example: $contributorName has signed the CLA in $owner/$repo#$pullRequestNo'
custom-notsigned-prcomment: '<br/>Thank you for your submission, we really appreciate it. However, we ask that $you sign our [Contributor License Agreement](https://github.com/remotely-save/remotely-save/blob/master/CLA.md) before we can accept your contribution. You can sign the CLA by just posting a Pull Request Comment same as the below format.<br/>'
#custom-pr-sign-comment: 'The signature to be committed in order to sign the CLA'
#custom-allsigned-prcomment: 'pull request comment when all contributors has signed, defaults to **CLA Assistant Lite bot** All Contributors have signed the CLA.'
#lock-pullrequest-aftermerge: false - if you don't want this bot to automatically lock the pull request after merging (default - true)
#use-dco-flag: true - If you are using DCO instead of CLA


@@ -23,6 +23,10 @@ jobs:
DROPBOX_APP_KEY: ${{secrets.DROPBOX_APP_KEY}}
ONEDRIVE_CLIENT_ID: ${{secrets.ONEDRIVE_CLIENT_ID}}
ONEDRIVE_AUTHORITY: ${{secrets.ONEDRIVE_AUTHORITY}}
REMOTELYSAVE_WEBSITE: ${{secrets.REMOTELYSAVE_WEBSITE}}
REMOTELYSAVE_CLIENT_ID: ${{secrets.REMOTELYSAVE_CLIENT_ID}}
GOOGLEDRIVE_CLIENT_ID: ${{secrets.GOOGLEDRIVE_CLIENT_ID}}
GOOGLEDRIVE_CLIENT_SECRET: ${{secrets.GOOGLEDRIVE_CLIENT_SECRET}}
strategy:
matrix:
@@ -32,10 +36,18 @@ jobs:
- name: Checkout codes
uses: actions/checkout@v2
with:
lfs: true
submodules: recursive
- name: Checkout LFS
run: git lfs checkout
- name: Checkout LFS file list
run: git lfs ls-files --long | cut -d ' ' -f1 | sort > .lfs-assets-id
- name: LFS Cache
uses: actions/cache@v3
with:
path: .git/lfs/objects
key: ${{ runner.os }}-lfs-${{ hashFiles('.lfs-assets-id') }}
restore-keys: |
${{ runner.os }}-lfs-
- name: Git LFS Pull
run: git lfs pull
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v2
with:


@@ -1,3 +0,0 @@
node_modules/
main.js
data.json

23
CLA.md Normal file

@@ -0,0 +1,23 @@
In order to clarify the intellectual property license granted with Contributions from any person or entity, Remotely Save dev team ("Remotely Save") must have on file a signed Contributor License Agreement ("CLA") from each Contributor, indicating agreement with the license terms below. This agreement is for your protection as a Contributor as well as the protection of Remotely Save and its users. It does not change your rights to use your own Contributions for any other purpose.
You accept and agree to the following terms and conditions for Your Contributions (present and future) that you submit to Remotely Save. Except for the license granted herein to Remotely Save and recipients of software distributed by Remotely Save, You reserve all right, title, and interest in and to Your Contributions.
1. Definitions.
"You" (or "Your") shall mean the copyright owner or legal entity authorized by the copyright owner that is making this Agreement with Remotely Save. For legal entities, the entity making a Contribution and all other entities that control, are controlled by, or are under common control with that entity are considered to be a single Contributor. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
"Contribution" shall mean any original work of authorship, including any modifications or additions to an existing work, that is intentionally submitted by You to Remotely Save for inclusion in, or documentation of, any of the products owned or managed by Remotely Save (the "Work"). For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to Remotely Save or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, Remotely Save for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by You as "Not a Contribution."
2. Grant of Copyright License. Subject to the terms and conditions of this Agreement, You hereby grant to Remotely Save and to recipients of software distributed by Remotely Save a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, sublicense, and distribute Your Contributions and such derivative works.
3. Grant of Patent License. Subject to the terms and conditions of this Agreement, You hereby grant to Remotely Save and to recipients of software distributed by Remotely Save a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by You that are necessarily infringed by Your Contribution(s) alone or by combination of Your Contribution(s) with the Work to which such Contribution(s) was submitted. If any entity institutes patent litigation against You or any other entity (including a cross-claim or counterclaim in a lawsuit) alleging that your Contribution, or the Work to which you have contributed, constitutes direct or contributory patent infringement, then any patent licenses granted to that entity under this Agreement for that Contribution or Work shall terminate as of the date such litigation is filed.
4. You represent that you are legally entitled to grant the above license. If your employer(s) has rights to intellectual property that you create that includes your Contributions, you represent that you have received permission to make Contributions on behalf of that employer, that your employer has waived such rights for your Contributions to Remotely Save, or that your employer has executed a separate Corporate CLA with Remotely Save.
5. You represent that each of Your Contributions is Your original creation (see section 7 for submissions on behalf of others). You represent that Your Contribution submissions include complete details of any third-party license or other restriction (including, but not limited to, related patents and trademarks) of which you are personally aware and which are associated with any part of Your Contributions.
6. You are not expected to provide support for Your Contributions, except to the extent You desire to provide support. You may provide support for free, for a fee, or not at all. Unless required by applicable law or agreed to in writing, You provide Your Contributions on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE.
7. Should You wish to submit work that is not Your original creation, You may submit it to Remotely Save separately from any Contribution, identifying the complete details of its source and of any license or other restriction (including, but not limited to, related patents, trademarks, and license agreements) of which you are personally aware, and conspicuously marking the work as "Submitted on behalf of a third-party: [named here]".
8. You agree to notify Remotely Save of any facts or circumstances of which you become aware that would make these representations inaccurate in any respect.

22
CONTRIBUTING.md Normal file

@@ -0,0 +1,22 @@
# Contributing
## What
Starting from April 29, 2024, contributions from individual contributors can only be accepted after they sign the CLA.
We do not accept corporate contributions at this moment.
You can check out [CLA](./CLA.md).
## Steps
1. Make some changes to the code. Open a pull request.
2. A robot will check the status.
![robot check](./assets/cla-process/cla-robot-alert.png)
3. Read the [CLA](./CLA.md) carefully and make a decision.
4. If you decide to sign the CLA, please make a comment "I have read the CLA Document and I hereby sign the CLA".
5. If you decide not to sign the CLA, please close the PR.
6. The robot should recheck and pass the check automatically.
![robot recheck](./assets/cla-process/cla-sign-and-pass.png)
7. Your PR will be reviewed.
8. Once you have signed the CLA, the robot should pass the check automatically for any further PRs you submit.

203
LICENSE

@@ -1,202 +1,3 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
The code, files, and subfolders inside the folders `src`, `tests`, `docs`, and `assets` are released under the "Open Source" license "Apache License, version 2.0", described at https://www.apache.org/licenses/LICENSE-2.0 .
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
The code, files, and subfolders inside the folder `pro` are released under the "Source Available" license "PolyForm Strict License 1.0.0", described at https://polyformproject.org/licenses/strict/1.0.0/ .


@@ -21,15 +21,17 @@ This is yet another unofficial sync plugin for Obsidian. If you like it or find
- Dropbox
- OneDrive for personal
- Webdav
- Webdis
- Google Drive (PRO feature)
- [Here](./docs/services_connectable_or_not.md) shows more connectable (or not-connectable) services in detail.
- **Obsidian Mobile supported.** Vaults can be synced across mobile and desktop devices with the cloud service as the "broker".
- **[End-to-end encryption](./docs/encryption/README.md) supported.** Files are encrypted in openssl format before being sent to the cloud **if** the user specifies a password.
- **Scheduled auto sync supported.** You can also manually trigger the sync using the sidebar ribbon, or using the command from the command palette (or even bind a hotkey combination to the command and press it).
- **[Minimal Intrusive](./docs/minimal_intrusive_design.md).**
- **Skip Large files** and **skip paths** by custom regex conditions!
- **Fully open source under [Apache-2.0 License](./LICENSE).**
- **[Sync Algorithm open](./docs/sync_algorithm/v3/intro.md) for discussion.**
- **[Basic Conflict Detection And Handling](./docs/sync_algorithm/v3/intro.md)** now, more to come!
- **[Sync Algorithm](./docs/sync_algorithm/v3/intro.md) is provided for discussion.**
- **[Basic Conflict Detection And Handling](./docs/sync_algorithm/v3/intro.md)** for free version. **[Advanced Conflict Handling](./pro/README.md)** for PRO version.
- Source Available. See [License](./LICENSE) for details.
## Limitations
@@ -66,11 +68,13 @@ Additionally, the plugin author may occasionally visit Obsidian official forum a
- [Storj](./docs/remote_services/s3_storj_io/README.md)
- [腾讯云 COS](./docs/remote_services/s3_tencent_cloud_cos/README.zh-cn.md) | [Tencent Cloud COS](./docs/remote_services/s3_tencent_cloud_cos/README.md)
- [MinIO](./docs/remote_services/s3_minio/README.md)
- Prepare your S3 (-compatible) service information: [endpoint, region](https://docs.aws.amazon.com/general/latest/gr/s3.html), [access key id, secret access key](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/getting-your-credentials.html), bucket name. The bucket should be empty and solely for syncing a vault.
- [又拍云](./docs/remote_services/s3_upyun/README.zh-cn.md)
- Prepare your S3 (-compatible) service information: [endpoint, region](https://docs.aws.amazon.com/general/latest/gr/s3.html), [access key id, secret access key](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/getting-your-credentials.html), bucket name.
- If you are using AWS S3, create [policy and user](./docs/remote_services/s3_general/s3_user_policy.md).
- Very old versions of Obsidian need [configuring CORS](./docs/remote_services/s3_general/s3_cors_configure.md).
- Download and enable this plugin.
- Enter your information in the settings of this plugin.
- If you do not set a prefix in the settings, the bucket should be empty and dedicated to syncing a single vault. You can set a prefix in the settings so that the same bucket can store multiple vaults.
- If you want to enable end-to-end encryption, also set a password in the settings. If you do not specify a password, the files and folders are synced to the cloud as plain, unencrypted content.
- Click the new "circle arrow" icon on the ribbon (the left sidebar), **every time** you want to sync your vault between local and remote. (Or, you could configure auto sync in the settings panel (See next chapter).) While syncing, the icon becomes "two half-circle arrows". Besides clicking the icon on the sidebar ribbon, you can also activate the corresponding command in the command palette.
- **Be patient while syncing.** Especially during the first sync.
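
As a quick sanity check of the prepared information outside Obsidian, here is a minimal sketch using the same `@aws-sdk/client-s3` dependency the plugin itself uses (every value below is a placeholder, not a real endpoint or credential):

```
import { ListObjectsV2Command, S3Client } from "@aws-sdk/client-s3";

// Placeholders; fill in your own endpoint, region, credentials, and bucket.
const client = new S3Client({
  endpoint: "https://s3.us-east-1.amazonaws.com",
  region: "us-east-1",
  credentials: {
    accessKeyId: "YOUR_ACCESS_KEY_ID",
    secretAccessKey: "YOUR_SECRET_ACCESS_KEY",
  },
});

// Listing a few objects verifies that the credentials and bucket work.
const res = await client.send(
  new ListObjectsV2Command({ Bucket: "your-vault-bucket", MaxKeys: 5 })
);
console.log(res.Contents?.map((o) => o.Key) ?? []);
```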
@@ -97,6 +101,8 @@ Additionally, the plugin author may occasionally visit Obsidian official forum a
### webdav
- Tutorials / Examples:
- [Nextcloud](./docs/remote_services/webdav_nextcloud/README.md)
- [The Good Cloud](./docs/remote_services/webdav_thegoodcloud/README.md)
- [ownCloud](./docs/remote_services/webdav_owncloud/README.md)
- [InfiniCloud](./docs/remote_services/webdav_infinicloud_teracloud/README.md)
- [Synology webdav server](./docs/remote_services/webdav_synology_webdav_server/README.md) | [群晖 webdav server](./docs/remote_services/webdav_synology_webdav_server/README.zh-cn.md)
@@ -107,6 +113,17 @@ Additionally, the plugin author may occasionally visit Obsidian official forum a
- Password-based end-to-end encryption is also supported. But please be aware that **the vault name itself is not encrypted**.
- If you want to sync the files across multiple devices, **your vault name should be the same** while using default settings.
### Webdis
- Tutorials:
- [Webdis](./docs/remote_services/webdis/README.md)
- Mostly experimental.
- You have to set up and protect your web server yourself.
### Google Drive (PRO feature)
PRO (paid) feature "sync with Google Drive" allows users to sync with Google Drive. Tutorials and limitations are documented [here](./docs/remote_services/googledrive/README.md).
## Scheduled Auto Sync
- You can configure auto syncing every N minutes in settings.
@@ -119,6 +136,10 @@ Additionally, the plugin author may occasionally visit Obsidian official forum a
In the latest version, you can change the settings to allow syncing `_` files or folders, as well as the `.obsidian` special config folder (but not any other `.` files or folders).
## PRO Features
See [PRO](./docs/pro/README.md) for more details.
## How To Debug
See [here](./docs/how_to_debug/README.md) for more details.

BIN
assets/cla-process/cla-robot-alert.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
assets/cla-process/cla-sign-and-pass.png (Stored with Git LFS) Normal file

Binary file not shown.

55
biome.json Normal file

@@ -0,0 +1,55 @@
{
"$schema": "https://biomejs.dev/schemas/1.7.3/schema.json",
"organizeImports": {
"enabled": true
},
"files": {
"ignore": ["main.js"]
},
"formatter": {
"enabled": true,
"formatWithErrors": false,
"ignore": [],
"attributePosition": "auto",
"indentStyle": "space",
"indentWidth": 2,
"lineEnding": "lf",
"lineWidth": 80
},
"javascript": {
"formatter": {
"arrowParentheses": "always",
"bracketSameLine": false,
"bracketSpacing": true,
"jsxQuoteStyle": "double",
"quoteProperties": "asNeeded",
"semicolons": "always",
"trailingComma": "es5"
}
},
"linter": {
"enabled": true,
"rules": {
"recommended": true,
"suspicious": {
"noExplicitAny": "off",
"noPrototypeBuiltins": "off",
"noControlCharactersInRegex": "off"
},
"style": {
"noUselessElse": "off",
"useNodejsImportProtocol": "off",
"noUnusedTemplateLiteral": "off",
"useTemplate": "off",
"noNonNullAssertion": "off"
},
"performance": {
"noDelete": "off"
},
"complexity": {
"noForEach": "off",
"useLiteralKeys": "off"
}
}
}
}


@@ -1,25 +0,0 @@
// Importing the http module
const http = require("http");
const requestHandler = (req, res) => {
let body = [];
req
.on("data", (chunk) => {
body.push(chunk);
})
.on("end", () => {
const parsed = JSON.parse(Buffer.concat(body).toString());
const prettyParsed = JSON.stringify(parsed, null, 2);
console.log(prettyParsed);
res.setHeader("Content-Type", "application/json");
res.end(prettyParsed);
});
};
const server = http.createServer(requestHandler);
const addr = "0.0.0.0";
const port = 3000;
server.listen(port, addr, undefined, () => {
console.log(`Server is Running on ${addr}:${port}`);
});

53
docs/pro/README.md Normal file

@@ -0,0 +1,53 @@
# PRO Features
From version 0.5.x, Remotely Save introduces PRO (paid) features. Users need to subscribe to (pay for) them in order to use them.
**If you are using basic features only, you don't need an online account, and you don't need to pay for the plugin.**
# Links
* Remotely Save official website: <https://remotelysave.com>
* Sign up / Sign in: <https://remotelysave.com/user/signupin>
* User profile: <https://remotelysave.com/user/profile>
# Disclaimer
It's different from, and NOT affiliated with, the Obsidian account.
# Steps
## Steps of signing up and signing in
1. Go to the website, sign up, and sign in. You can directly visit <https://remotelysave.com/user/signupin> or click the link on the Remotely Save plugin settings page.
![pro setting](./pro_setting.png)
2. Use an email address and a password as usual. It doesn't need to be a Gmail account.
## Steps of connecting
You need to connect your plugin to your online account. In Obsidian, in the Remotely Save plugin settings, click the "Connect" button to start the flow.
1. You will see a special address. Click it and visit the website.
2. Click "allow" on the website.
3. At the end of the auth flow on the website, you will be shown a code; copy it...
4. ...and paste it back into the plugin modal inside Obsidian, then confirm.
![connect flow](./connect_flow.png)
## Steps of subscribing to some features
1. First, visit your [profile page](https://remotelysave.com/user/profile) online.
2. You can subscribe to some features. Prices vary.
![PRO features online](./pro_features_enabled_on_website.png)
3. Go back to your Remotely Save plugin inside Obsidian and click the "Check again" button in the PRO settings, so that the plugin knows which features are enabled.
![check again PRO features](./check_pro_features_in_settings.png)
4. Sync and enjoy your PRO features!
## Why so complicated?
Because we don't have a payment method inside the plugin, we have to:
* build a website,
* require users to have online accounts,
* and connect the plugin to the online account.
Moreover, an online account allows flexible management of subscriptions.

BIN
docs/pro/check_pro_features_in_settings.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
docs/pro/connect_flow.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
docs/pro/pro_features_enabled_on_website.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
docs/pro/pro_setting.png (Stored with Git LFS) Normal file

Binary file not shown.


@@ -0,0 +1,59 @@
# Google Drive (PRO)
# Intro
* It's a PRO feature of the Remotely Save plugin.
* **This plugin is NOT an official Google product, and just uses Google Drive's public API.**
# Steps
## Steps of Remotely Save subscription
1. Sign up for and sign in to an online account, then connect your plugin to it. See [the PRO tutorial](../../pro/README.md) first.
2. Subscribe to "sync with Google Drive" feature online.
3. Go back to your Remotely Save plugin inside Obsidian and click the "Check again" button in the PRO settings so that the plugin knows which features are enabled. In this case, "sync with Google Drive" should be detected.
## Steps of Connecting to your Google Drive
After you have enabled the PRO feature in your Remotely Save plugin, you can connect to your Google Drive account.
1. In Remotely Save settings, change your sync service to Google Drive.
![change remote to google drive](./change_remote_to_google_drive.png)
2. Click "Auth", visit the link, and go to the Remotely Save website to start.
![visit start link](./google_drive_auth_link.png)
3. On the website, click the link to go to Google Drive auth page.
4. Follow the instructions on the Google website, and allow (continue) Remotely Save to connect.
![allow Remotely Save in Google website](./google_drive_auth_allow.png)
5. You will be redirected to the Remotely Save website, and you will get a code. Copy it...
![redirected back and get the code](./google_drive_auth_code_show.png)
6. ... And paste the code back into the plugin inside Obsidian. Click submit.
![submit the code in setting](./google_drive_code_submit.png)
7. A notice will tell you whether you've connected.
8. Sync! The plugin will create a vault folder in the root of your Google Drive and upload notes into that folder.
9. **Read the caveats below.**
# Why so complicated?
Because Google Drive's API doesn't fit into the special environment of an Obsidian plugin, we need a website.
# The credential
The website does **NOT** store or save the Google Drive credential (the code you obtain at the end of the flow). The website is just a "bridge" that helps you obtain that code and manages your subscription to PRO features.
But please be aware that the code is saved locally in your Obsidian. It works like a special password, so that the plugin can upload, download, or modify the files for you.
# The caveats
* As of June 2024, this feature is in beta. **Back up your vault before using this feature.**
* The plugin can **only** see, read, or write the files and folders created by itself!
This means that you CANNOT manually create the vault folder in your Google Drive account. And if you manually upload any files using Google's official website, the plugin does **NOT** see them. All operations must go through Obsidian and be uploaded by the plugin.
You can, however, view and download the files on Google Drive's [official web page](https://drive.google.com/drive/u/0/my-drive).
Precisely speaking, the plugin applies for the `drive.file` scope recommended by Google. See [the doc](https://developers.google.com/drive/api/guides/api-specific-auth#benefits) by Google for the scope's benefits. Basically the plugin will never (is unable to) mess up your other files or folders.
Moreover, this scope is "non-sensitive", so the plugin doesn't need to go through Google's complicated verification process when applying for it.
* Google Drive, unlike other cloud storage services, allows files with the same name to coexist in the same folder! (hmmmmm...) This may or may not make the plugin stop working. Users might need to remove the duplicated files manually on Google's official website.

Binary file not shown.

BIN
docs/remote_services/googledrive/google_drive_auth_allow.png (Stored with Git LFS) Normal file

Binary file not shown.

Binary file not shown.

BIN
docs/remote_services/googledrive/google_drive_auth_link.png (Stored with Git LFS) Normal file

Binary file not shown.

Binary file not shown.


@@ -0,0 +1,24 @@
# 又拍云 (Upyun)
## Links
- Official site: <https://www.upyun.com/>
- Official S3-compatibility docs: <https://help.upyun.com/knowledge-base/aws-s3%e5%85%bc%e5%ae%b9/>
## Warning!!!!!
又拍云 (Upyun) seems (?) to make all files public by default. Pay close attention to privacy, and it is strongly recommended to enable the plugin's encryption.
## Steps
1. Sign up, and create a new object storage service.
2. Following the official docs <https://help.upyun.com/knowledge-base/aws-s3%e5%85%bc%e5%ae%b9/>, create an operator and then create S3 access credentials.
3. In Remotely Save, set the following:
- Endpoint: `s3.api.upyun.com` (**it must be this domain**)
- Region: `us-east-1`
- Access Key ID: the AccessKey of the credentials you obtained
- Secret Access Key: the SecretAccessKey of the credentials you obtained
- Bucket name: the "service name" you created
- Generate folder objects: do not generate (the default). **Be sure to choose "do not generate".**
4. Optionally, set a password in the plugin settings.
5. Sync.


@@ -0,0 +1,13 @@
# Nextcloud
## Link
<https://nextcloud.com/>
## Steps
1. Install, or find a hosted version.
* The Docker version <https://github.com/nextcloud/docker> on an internal network, with [Caddy as a reverse proxy](https://caddyserver.com/docs/quick-starts/reverse-proxy) (for HTTPS), is personally recommended.
* If you find installing Nextcloud yourself difficult, you can find some of "Nextcloud's trusted, certified providers" on the [Nextcloud sign-up page](https://nextcloud.com/sign-up/); for example, [The Good Cloud](https://thegood.cloud/) generously provides 2 GB of free storage space.
* Remotely Save has been tested to work with the Docker version and The Good Cloud.
2. Go to Nextcloud's settings. Find the webdav URL (something like `https://cloud.example.com/remote.php/dav/files/USERNAME`). Use this (without a trailing slash), along with your username and password, in Remotely Save.
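
To double-check the URL and credentials outside Obsidian, here is a minimal sketch using the same `webdav` npm package that the plugin depends on (the address and account below are placeholders):

```
import { createClient } from "webdav";

// Placeholders; use your own Nextcloud webdav URL and account.
const client = createClient(
  "https://cloud.example.com/remote.php/dav/files/USERNAME",
  { username: "USERNAME", password: "PASSWORD" }
);

// Listing the root directory confirms the URL and credentials work.
console.log(await client.getDirectoryContents("/"));
```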


@@ -0,0 +1,9 @@
# The Good Cloud
## Link
<https://thegood.cloud/>
## Steps
It's a hosted version of Nextcloud providing 2 GB of free space. See [Nextcloud](../webdav_nextcloud/README.md) for more instructions.


@@ -0,0 +1,35 @@
# Webdis
## Links
- Webdis: <https://github.com/nicolasff/webdis>
- Redis®: <https://redis.io/>
## Explanation and Background
I like the Redis® software very much, and would like to experiment with using it as "file storage". It seems natural to use the path as the key and the content as the value (sort of...; see below).
However, Redis® works over TCP connections, and browser JS cannot establish raw TCP connections. We need an HTTP gateway to provide an HTTP API; Webdis seems to be the most famous open-source one.
And of course, this method should work for Redis® alternatives: Valkey, Redict, KeyDB, Dragonfly, Garnet, ...
## Disclaimer
This app is NOT an official Redis® Ltd / Redis® OSS / Webdis product. Redis is a registered trademark of Redis Ltd.
**Never expose your Redis® or Webdis to the public without security protection!!!** You are responsible for protecting your server.
## Usage
1. Install Redis®.
2. Install Webdis.
3. In `webdis.json`, configure the ACL to require a username and password, and/or IP filters. **Never expose your Redis® or Webdis to the public without security protection!!!**
4. Install and configure a reverse proxy, firewall, HTTPS, etc. (You have to configure HTTPS correctly if you want to use it on iOS.)
5. In Remotely Save settings, enter your server address, username, password, and adjust the "base dir". Check connection.
6. Sync!
7. Several keys and values will be generated in your Redis® database:
```
rs:fs:v1:${encodeURIComponent(vaultName+'/'+folderStructure+'/'+fileName)}:meta # you can HGETALL it
rs:fs:v1:${encodeURIComponent(vaultName+'/'+folderStructure+'/'+fileName)}:content # you can GET it
```
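
For the curious, here is a minimal sketch of reading those keys through Webdis's HTTP API (assuming Webdis listens on `http://127.0.0.1:7379` behind proper protection; the vault and file names below are made up):

```
// Webdis maps /COMMAND/arg1/arg2 onto the corresponding Redis command
// and returns the reply as JSON.
const key = `rs:fs:v1:${encodeURIComponent("myvault/notes/hello.md")}:meta`;
const resp = await fetch(
  `http://127.0.0.1:7379/HGETALL/${encodeURIComponent(key)}`
);
console.log(await resp.json()); // => { HGETALL: { ...metadata fields... } }
```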


@@ -1,7 +1,7 @@
import dotenv from "dotenv/config";
import "dotenv/config";
import esbuild from "esbuild";
import process from "process";
import inlineWorkerPlugin from "esbuild-plugin-inline-worker";
import process from "process";
// import builtins from 'builtin-modules'
const banner = `/*
@@ -17,6 +17,11 @@ const prod = process.argv[2] === "production";
const DEFAULT_DROPBOX_APP_KEY = process.env.DROPBOX_APP_KEY || "";
const DEFAULT_ONEDRIVE_CLIENT_ID = process.env.ONEDRIVE_CLIENT_ID || "";
const DEFAULT_ONEDRIVE_AUTHORITY = process.env.ONEDRIVE_AUTHORITY || "";
const DEFAULT_REMOTELYSAVE_WEBSITE = process.env.REMOTELYSAVE_WEBSITE || "";
const DEFAULT_REMOTELYSAVE_CLIENT_ID = process.env.REMOTELYSAVE_CLIENT_ID || "";
const DEFAULT_GOOGLEDRIVE_CLIENT_ID = process.env.GOOGLEDRIVE_CLIENT_ID || "";
const DEFAULT_GOOGLEDRIVE_CLIENT_SECRET =
process.env.GOOGLEDRIVE_CLIENT_SECRET || "";
esbuild
.context({
@@ -36,6 +41,7 @@ esbuild
"net",
"http",
"https",
"vm",
// ...builtins
],
inject: ["./esbuild.injecthelper.mjs"],
@@ -51,6 +57,10 @@ esbuild
"process.env.DEFAULT_DROPBOX_APP_KEY": `"${DEFAULT_DROPBOX_APP_KEY}"`,
"process.env.DEFAULT_ONEDRIVE_CLIENT_ID": `"${DEFAULT_ONEDRIVE_CLIENT_ID}"`,
"process.env.DEFAULT_ONEDRIVE_AUTHORITY": `"${DEFAULT_ONEDRIVE_AUTHORITY}"`,
"process.env.DEFAULT_REMOTELYSAVE_WEBSITE": `"${DEFAULT_REMOTELYSAVE_WEBSITE}"`,
"process.env.DEFAULT_REMOTELYSAVE_CLIENT_ID": `"${DEFAULT_REMOTELYSAVE_CLIENT_ID}"`,
"process.env.DEFAULT_GOOGLEDRIVE_CLIENT_ID": `"${DEFAULT_GOOGLEDRIVE_CLIENT_ID}"`,
"process.env.DEFAULT_GOOGLEDRIVE_CLIENT_SECRET": `"${DEFAULT_GOOGLEDRIVE_CLIENT_SECRET}"`,
global: "window",
"process.env.NODE_DEBUG": `undefined`, // ugly fix
"process.env.DEBUG": `undefined`, // ugly fix


@@ -1,2 +1,2 @@
export let Buffer = require("buffer").Buffer;
export let process = require("process/browser");
export const Buffer = require("buffer").Buffer;
export const process = require("process/browser");


@@ -1,11 +1,11 @@
{
"id": "remotely-save",
"name": "Remotely Save",
"version": "0.4.16",
"version": "0.5.3",
"minAppVersion": "0.13.21",
"description": "Yet another unofficial plugin allowing users to synchronize notes between local device and the cloud service.",
"author": "fyears",
"authorUrl": "https://github.com/fyears",
"isDesktopOnly": false,
"fundingUrl": "https://github.com/remotely-save/donation"
"fundingUrl": "https://remotelysave.com"
}


@@ -1,11 +1,11 @@
{
"id": "remotely-save",
"name": "Remotely Save",
"version": "0.4.16",
"version": "0.5.3",
"minAppVersion": "0.13.21",
"description": "Yet another unofficial plugin allowing users to synchronize notes between local device and the cloud service.",
"author": "fyears",
"authorUrl": "https://github.com/fyears",
"isDesktopOnly": false,
"fundingUrl": "https://github.com/remotely-save/donation"
"fundingUrl": "https://remotelysave.com"
}


@@ -1,75 +1,76 @@
{
"name": "remotely-save",
"version": "0.4.16",
"version": "0.5.3",
"description": "This is yet another sync plugin for Obsidian app.",
"scripts": {
"dev2": "node esbuild.config.mjs --watch",
"build2": "tsc -noEmit -skipLibCheck && node esbuild.config.mjs production",
"build": "webpack --mode production",
"dev": "webpack --mode development --watch",
"format": "npx prettier --trailing-comma es5 --write .",
"format": "npx @biomejs/biome check --apply .",
"clean": "npx rimraf main.js",
"test": "cross-env TS_NODE_COMPILER_OPTIONS={\\\"module\\\":\\\"commonjs\\\"} mocha -r ts-node/register 'tests/**/*.ts'"
"test": "cross-env TS_NODE_COMPILER_OPTIONS={\\\"module\\\":\\\"commonjs\\\"} mocha -r ts-node/register 'tests/**/*.ts' 'pro/tests/**/*.ts'"
},
"browser": {
"path": "path-browserify",
"process": "process/browser",
"stream": "stream-browserify",
"crypto": "crypto-browserify",
"url": "url/"
"url": "url/",
"fs": false,
"vm": false
},
"source": "main.ts",
"keywords": [],
"author": "",
"license": "Apache-2.0",
"license": "SEE LICENSE IN LICENSE",
"devDependencies": {
"@biomejs/biome": "1.7.3",
"@microsoft/microsoft-graph-types": "^2.40.0",
"@types/chai": "^4.3.14",
"@types/chai-as-promised": "^7.1.8",
"@types/jsdom": "^21.1.6",
"@types/lodash": "^4.14.202",
"@types/lodash": "^4.17.0",
"@types/mime-types": "^2.1.4",
"@types/mocha": "^10.0.6",
"@types/mustache": "^4.2.5",
"@types/node": "^20.10.4",
"@types/node": "^20.12.7",
"@types/qrcode": "^1.5.5",
"builtin-modules": "^3.3.0",
"chai": "^4.4.1",
"chai-as-promised": "^7.1.1",
"cross-env": "^7.0.3",
"dotenv": "^16.3.1",
"esbuild": "^0.19.9",
"dotenv": "^16.4.5",
"esbuild": "^0.20.2",
"esbuild-plugin-inline-worker": "^0.1.1",
"jsdom": "^23.0.1",
"jsdom": "^24.0.0",
"mocha": "^10.4.0",
"npm-check-updates": "^16.14.12",
"obsidian": "^1.4.11",
"prettier": "^3.1.1",
"npm-check-updates": "^16.14.20",
"obsidian": "^1.5.7",
"ts-loader": "^9.5.1",
"ts-node": "^10.9.2",
"tslib": "^2.6.2",
"typescript": "^5.3.3",
"typescript": "^5.4.5",
"webdav-server": "^2.6.2",
"webpack": "^5.89.0",
"webpack": "^5.91.0",
"webpack-cli": "^5.1.4",
"worker-loader": "^3.0.8"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.474.0",
"@aws-sdk/lib-storage": "^3.474.0",
"@aws-sdk/signature-v4-crt": "^3.474.0",
"@aws-sdk/types": "^3.468.0",
"@azure/msal-node": "^2.6.0",
"@aws-sdk/client-s3": "^3.563.0",
"@aws-sdk/lib-storage": "^3.563.0",
"@aws-sdk/signature-v4-crt": "^3.556.0",
"@aws-sdk/types": "^3.535.0",
"@azure/msal-node": "^2.7.0",
"@fyears/rclone-crypt": "^0.0.7",
"@fyears/tsqueue": "^1.0.1",
"@microsoft/microsoft-graph-client": "^3.0.7",
"@smithy/fetch-http-handler": "^2.3.1",
"@smithy/protocol-http": "^3.0.11",
"@smithy/querystring-builder": "^2.0.15",
"acorn": "^8.11.2",
"@sanity/diff-match-patch": "^3.1.1",
"@smithy/fetch-http-handler": "^2.5.0",
"@smithy/protocol-http": "^3.3.0",
"@smithy/querystring-builder": "^2.2.0",
"acorn": "^8.11.3",
"aggregate-error": "^5.0.0",
"assert": "^2.1.0",
"aws-crt": "^1.20.0",
"aws-crt": "^1.21.2",
"buffer": "^6.0.3",
"crypto-browserify": "^3.12.0",
"dropbox": "^10.34.0",
@@ -77,11 +78,13 @@
"http-status-codes": "^2.3.0",
"localforage": "^1.10.0",
"localforage-getitems": "^1.4.2",
"localforage-removeitems": "^1.4.0",
"lodash": "^4.17.21",
"lucide": "^0.298.0",
"lucide": "^0.376.1",
"mime-types": "^2.1.35",
"mustache": "^4.2.0",
"nanoid": "^5.0.4",
"nanoid": "^5.0.7",
"node-diff3": "^3.1.2",
"p-queue": "^8.0.1",
"path-browserify": "^1.0.1",
"process": "^0.11.10",
@@ -91,7 +94,7 @@
"stream-browserify": "^3.0.0",
"url": "^0.11.3",
"util": "^0.12.5",
"webdav": "^5.3.1",
"webdav": "^5.6.0",
"xregexp": "^5.1.1"
}
}

104
pro/LICENSE Normal file

@@ -0,0 +1,104 @@
# PolyForm Strict License 1.0.0
<https://polyformproject.org/licenses/strict/1.0.0>
## Acceptance
In order to get any license under these terms, you must agree
to them as both strict obligations and conditions to all
your licenses.
## Copyright License
The licensor grants you a copyright license for the software
to do everything you might do with the software that would
otherwise infringe the licensor's copyright in it for any
permitted purpose, other than distributing the software or
making changes or new works based on the software.
## Patent License
The licensor grants you a patent license for the software that
covers patent claims the licensor can license, or becomes able
to license, that you would infringe by using the software.
## Noncommercial Purposes
Any noncommercial purpose is a permitted purpose.
## Personal Uses
Personal use for research, experiment, and testing for
the benefit of public knowledge, personal study, private
entertainment, hobby projects, amateur pursuits, or religious
observance, without any anticipated commercial application,
is use for a permitted purpose.
## Noncommercial Organizations
Use by any charitable organization, educational institution,
public research organization, public safety or health
organization, environmental protection organization,
or government institution is use for a permitted purpose
regardless of the source of funding or obligations resulting
from the funding.
## Fair Use
You may have "fair use" rights for the software under the
law. These terms do not limit them.
## No Other Rights
These terms do not allow you to sublicense or transfer any of
your licenses to anyone else, or prevent the licensor from
granting licenses to anyone else. These terms do not imply
any other licenses.
## Patent Defense
If you make any written claim that the software infringes or
contributes to infringement of any patent, your patent license
for the software granted under these terms ends immediately. If
your company makes such a claim, your patent license ends
immediately for work on behalf of your company.
## Violations
The first time you are notified in writing that you have
violated any of these terms, or done anything with the software
not covered by your licenses, your licenses can nonetheless
continue if you come into full compliance with these terms,
and take practical steps to correct past violations, within
32 days of receiving notice. Otherwise, all your licenses
end immediately.
## No Liability
***As far as the law allows, the software comes as is, without
any warranty or condition, and the licensor will not be liable
to you for any damages arising out of these terms or the use
or nature of the software, under any kind of legal claim.***
## Definitions
The **licensor** is the individual or entity offering these
terms, and the **software** is the software the licensor makes
available under these terms.
**You** refers to the individual or entity agreeing to these
terms.
**Your company** is any legal entity, sole proprietorship,
or other kind of organization that you work for, plus all
organizations that have control over, are under the control of,
or are under common control with that organization. **Control**
means ownership of substantially all the assets of an entity,
or the power to direct its management and policies by vote,
contract, or otherwise. Control can be direct or indirect.
**Your licenses** are all the licenses granted to you for the
software under these terms.
**Use** means anything you do with the software requiring one
of your licenses.

25
pro/README.md Normal file

@@ -0,0 +1,25 @@
# Pro Features
## What?
Remotely Save has some "pro features", which users have to pay for.
## Sign Up / Sign In And Connect
See the tutorial about your PRO account [here](../docs/pro/README.md).
## Smart Conflict
The basic (free) version can detect conflicts, but users have to choose between keeping the newer or the larger version of the files.
The PRO (paid) feature "Smart Conflict" gives users one more option: merging small markdown files, or duplicating large markdown files and non-markdown files.
## Sync With Google Drive
The PRO (paid) feature "sync with Google Drive" allows users to sync with Google Drive. Tutorials and limitations are documented [here](../docs/remote_services/googledrive/README.md).
## License
The code, files, and subfolders inside the current folder (`pro` in the repo) are released under the "source available" license "PolyForm Strict License 1.0.0".
Suggestions are welcome.

328
pro/src/account.ts Normal file

@@ -0,0 +1,328 @@
import { nanoid } from "nanoid";
import { base64url } from "rfc4648";
import {
OAUTH2_FORCE_EXPIRE_MILLISECONDS,
type RemotelySavePluginSettings,
} from "../../src/baseTypes";
import {
COMMAND_CALLBACK_PRO,
type FeatureInfo,
PRO_CLIENT_ID,
type PRO_FEATURE_TYPE,
PRO_WEBSITE,
type ProConfig,
} from "./baseTypesPro";
const site = PRO_WEBSITE;
console.debug(`remotelysave official website: ${site}`);
export const DEFAULT_PRO_CONFIG: ProConfig = {
accessToken: "",
accessTokenExpiresInMs: 0,
accessTokenExpiresAtTimeMs: 0,
refreshToken: "",
enabledProFeatures: [],
email: "",
};
/**
* https://datatracker.ietf.org/doc/html/rfc7636
* dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk
* => E9Melhoa2OwvFrEMTJguCHaoeK1t8URWbuGJSstw-cM
* @param x
* @returns BASE64URL-ENCODE(SHA256(ASCII(code_verifier)))
*/
async function codeVerifier2CodeChallenge(x: string) {
if (x === undefined || x === "") {
return "";
}
try {
return base64url.stringify(
new Uint8Array(
await crypto.subtle.digest("SHA-256", new TextEncoder().encode(x))
),
{
pad: false,
}
);
} catch (e) {
return "";
}
}
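// Hedged example: the doc comment above quotes the RFC 7636 test vector, so
// the transform can be sanity-checked like this (illustrative only, not
// called anywhere in the plugin):
//
//   const challenge = await codeVerifier2CodeChallenge(
//     "dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk"
//   );
//   console.assert(challenge === "E9Melhoa2OwvFrEMTJguCHaoeK1t8URWbuGJSstw-cM");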
export const generateAuthUrlAndCodeVerifierChallenge = async (
hasCallback: boolean
) => {
const appKey = PRO_CLIENT_ID ?? "cli-"; // hard-code
const codeVerifier = nanoid(128);
const codeChallenge = await codeVerifier2CodeChallenge(codeVerifier);
let authUrl = `${site}/oauth2/authorize?response_type=code&client_id=${appKey}&token_access_type=offline&code_challenge_method=S256&code_challenge=${codeChallenge}&scope=pro.list.read`;
if (hasCallback) {
authUrl += `&redirect_uri=obsidian://${COMMAND_CALLBACK_PRO}`;
}
return {
authUrl,
codeVerifier,
codeChallenge,
};
};
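// Sketch of the intended PKCE flow (assumed from the functions in this file;
// `proConfig`, `authCode` and `saveFunc` are hypothetical caller-side names):
//
//   const { authUrl, codeVerifier } =
//     await generateAuthUrlAndCodeVerifierChallenge(true);
//   // ... the user opens authUrl, authorizes, and `authCode` comes back ...
//   const tokenRes = await sendAuthReq(codeVerifier, authCode, undefined);
//   await setConfigBySuccessfullAuthInplace(proConfig, tokenRes, saveFunc);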
export const sendAuthReq = async (
verifier: string,
authCode: string,
errorCallBack: any
) => {
const appKey = PRO_CLIENT_ID ?? "cli-"; // hard-code
try {
const k = {
code: authCode,
grant_type: "authorization_code",
code_verifier: verifier,
client_id: appKey,
// redirect_uri: `obsidian://${COMMAND_CALLBACK_PRO}`,
scope: "pro.list.read",
};
// console.debug(k);
const resp1 = await fetch(`${site}/api/v1/oauth2/token`, {
method: "POST",
body: new URLSearchParams(k),
});
const resp2 = await resp1.json();
return resp2;
} catch (e) {
console.error(e);
if (errorCallBack !== undefined) {
await errorCallBack(e);
}
}
};
export const sendRefreshTokenReq = async (refreshToken: string) => {
const appKey = PRO_CLIENT_ID ?? "cli-"; // hard-code
try {
console.info("start auto getting refreshed Remotely Save access token.");
const resp1 = await fetch(`${site}/api/v1/oauth2/token`, {
method: "POST",
body: new URLSearchParams({
grant_type: "refresh_token",
refresh_token: refreshToken,
client_id: appKey,
scope: "pro.list.read",
}),
});
const resp2: AuthResError | AuthResSucc = await resp1.json();
console.info("finish auto getting refreshed Remotely Save access token.");
return resp2;
} catch (e) {
console.error(e);
throw e;
}
};
interface AuthResError {
error: "invalid_request";
}
interface AuthResSucc {
error: undefined; // needed for typescript
refresh_token?: string;
access_token: string;
expires_in: number;
}
export const setConfigBySuccessfullAuthInplace = async (
config: ProConfig,
authRes: AuthResError | AuthResSucc,
saveUpdatedConfigFunc: () => Promise<any> | undefined
) => {
if (authRes.error !== undefined) {
throw Error(`you should not save the setting for ${authRes.error}`);
}
config.accessToken = authRes.access_token;
config.accessTokenExpiresAtTimeMs =
Date.now() + authRes.expires_in * 1000 - 5 * 60 * 1000;
config.accessTokenExpiresInMs = authRes.expires_in * 1000;
config.refreshToken = authRes.refresh_token || config.refreshToken;
// manually force the credentials to expire after 80 days
config.credentialsShouldBeDeletedAtTimeMs =
Date.now() + OAUTH2_FORCE_EXPIRE_MILLISECONDS;
await saveUpdatedConfigFunc?.();
console.info(
"finish updating local info of Remotely Save official website token"
);
};
export const getAccessToken = async (
config: ProConfig,
saveUpdatedConfigFunc: () => Promise<any> | undefined
) => {
const ts = Date.now();
if (
config.accessToken !== undefined &&
config.accessToken !== "" &&
config.accessTokenExpiresAtTimeMs > ts &&
(config.credentialsShouldBeDeletedAtTimeMs ?? ts + 1000 * 1000) > ts
) {
return config.accessToken;
}
console.debug(
`currently, accessToken=${config.accessToken}, accessTokenExpiresAtTimeMs=${
config.accessTokenExpiresAtTimeMs
}, credentialsShouldBeDeletedAtTimeMs=${
config.credentialsShouldBeDeletedAtTimeMs
},comp1=${config.accessTokenExpiresAtTimeMs > ts}, comp2=${
(config.credentialsShouldBeDeletedAtTimeMs ?? ts + 1000 * 1000) > ts
}`
);
// try to get it again??
const res = await sendRefreshTokenReq(config.refreshToken ?? "refresh-");
await setConfigBySuccessfullAuthInplace(config, res, saveUpdatedConfigFunc);
if (res.error !== undefined) {
throw Error("cannot update accessToken");
}
return res.access_token;
};
export const getAndSaveProFeatures = async (
config: ProConfig,
pluginVersion: string,
saveUpdatedConfigFunc: () => Promise<any> | undefined
) => {
const access = await getAccessToken(config, saveUpdatedConfigFunc);
const resp1 = await fetch(`${site}/api/v1/pro/list`, {
method: "GET",
headers: {
Authorization: `Bearer ${access}`,
"REMOTELYSAVE-API-Plugin-Ver": pluginVersion,
},
});
const rsp2: {
proFeatures: FeatureInfo[];
} = await resp1.json();
config.enabledProFeatures = rsp2.proFeatures;
await saveUpdatedConfigFunc?.();
return rsp2;
};
export const getAndSaveProEmail = async (
config: ProConfig,
pluginVersion: string,
saveUpdatedConfigFunc: () => Promise<any> | undefined
) => {
const access = await getAccessToken(config, saveUpdatedConfigFunc);
const resp1 = await fetch(`${site}/api/v1/profile/list`, {
method: "GET",
headers: {
Authorization: `Bearer ${access}`,
"REMOTELYSAVE-API-Plugin-Ver": pluginVersion,
},
});
const rsp2: {
email: string;
} = await resp1.json();
config.email = rsp2.email;
await saveUpdatedConfigFunc?.();
return rsp2;
};
/**
* If the check doesn't pass, the function should throw the error
* @returns
*/
export const checkProRunnableAndFixInplace = async (
featuresToCheck: PRO_FEATURE_TYPE[],
config: RemotelySavePluginSettings,
pluginVersion: string,
saveUpdatedConfigFunc: () => Promise<any> | undefined
): Promise<true> => {
console.debug(`checkProRunnableAndFixInplace`);
// many checks if status is valid
// no account
if (config.pro === undefined || config.pro.refreshToken === undefined) {
throw Error(`you need to "connect" to your account to use PRO features`);
}
// every feature should expire at most 40 days in the future,
// and if a feature has already expired, we also re-check
const msIn40Days = 1000 * 60 * 60 * 24 * 40;
for (const f of config.pro.enabledProFeatures) {
const tooFarInTheFuture = f.expireAtTimeMs >= Date.now() + msIn40Days;
const alreadyExpired = f.expireAtTimeMs <= Date.now();
if (tooFarInTheFuture || alreadyExpired) {
console.info(
`the pro feature expiry is too far in the future or has already expired, checking again.`
);
await getAndSaveProFeatures(
config.pro,
pluginVersion,
saveUpdatedConfigFunc
);
break;
}
}
const errorMsgs = [];
// check for the features
if (featuresToCheck.contains("feature-smart_conflict")) {
if (config.conflictAction === "smart_conflict") {
if (
config.pro.enabledProFeatures.filter(
(x) => x.featureName === "feature-smart_conflict"
).length === 1
) {
// good to go
} else {
errorMsgs.push(
`You're trying to use the "smart conflict" PRO feature but you haven't subscribed to it.`
);
}
} else {
// good to go
}
}
if (featuresToCheck.contains("feature-google_drive")) {
console.debug(
`checking "feature-google_drive", serviceType=${config.serviceType}`
);
console.debug(
`enabledProFeatures=${JSON.stringify(config.pro.enabledProFeatures)}`
);
if (config.serviceType === "googledrive") {
if (
config.pro.enabledProFeatures.filter(
(x) => x.featureName === "feature-google_drive"
).length === 1
) {
// good to go
} else {
errorMsgs.push(
`You're trying to use the "sync with Google Drive" PRO feature but you haven't subscribed to it.`
);
}
} else {
// good to go
}
}
if (errorMsgs.length !== 0) {
throw Error(errorMsgs.join("\n\n"));
}
return true;
};
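// Hedged caller-side usage sketch (assumed, not a verbatim excerpt from the
// plugin): before running a PRO code path, gate it on this check and surface
// the thrown message to the user, e.g. via Obsidian's Notice:
//
//   try {
//     await checkProRunnableAndFixInplace(
//       ["feature-smart_conflict"],
//       settings,
//       pluginVersion,
//       saveSettings
//     );
//     // ... proceed with the smart conflict logic ...
//   } catch (e) {
//     new Notice(`${e}`);
//   }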

pro/src/baseTypesPro.ts Normal file (+40)
@@ -0,0 +1,40 @@
export const MERGABLE_SIZE = 1000 * 1000; // 1 MB
export const COMMAND_CALLBACK_PRO = "remotely-save-cb-pro";
export const PRO_CLIENT_ID = process.env.DEFAULT_REMOTELYSAVE_CLIENT_ID;
export const PRO_WEBSITE = process.env.DEFAULT_REMOTELYSAVE_WEBSITE;
export type PRO_FEATURE_TYPE =
| "feature-smart_conflict"
| "feature-google_drive";
export interface FeatureInfo {
featureName: PRO_FEATURE_TYPE;
enableAtTimeMs: bigint;
expireAtTimeMs: bigint;
}
export interface ProConfig {
email?: string;
refreshToken?: string;
accessToken: string;
accessTokenExpiresInMs: number;
accessTokenExpiresAtTimeMs: number;
enabledProFeatures: FeatureInfo[];
credentialsShouldBeDeletedAtTimeMs?: number;
}
export interface GoogleDriveConfig {
accessToken: string;
accessTokenExpiresInMs: number;
accessTokenExpiresAtTimeMs: number;
refreshToken: string;
remoteBaseDir?: string;
credentialsShouldBeDeletedAtTimeMs?: number;
scope: "https://www.googleapis.com/auth/drive.file";
}
export const DEFAULT_GOOGLEDRIVE_CLIENT_ID =
process.env.DEFAULT_GOOGLEDRIVE_CLIENT_ID;
export const DEFAULT_GOOGLEDRIVE_CLIENT_SECRET =
process.env.DEFAULT_GOOGLEDRIVE_CLIENT_SECRET;

pro/src/conflictLogic.ts Normal file (+257)
@@ -0,0 +1,257 @@
import isEqual from "lodash/isEqual";
// import {
// makePatches,
// applyPatches,
// stringifyPatches,
// parsePatch,
// } from "@sanity/diff-match-patch";
import {
LCS,
diff3Merge,
diffComm,
diffPatch,
mergeDiff3,
mergeDigIn,
patch,
} from "node-diff3";
import type { Entity } from "../../src/baseTypes";
import { copyFile } from "../../src/copyLogic";
import type { FakeFs } from "../../src/fsAll";
import { MERGABLE_SIZE } from "./baseTypesPro";
export function isMergable(a: Entity, b?: Entity) {
if (b !== undefined && a.keyRaw !== b.keyRaw) {
return false;
}
return (
!a.keyRaw.endsWith("/") &&
a.sizeRaw <= MERGABLE_SIZE &&
(a.keyRaw.endsWith(".md") || a.keyRaw.endsWith(".markdown"))
);
}
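// Illustrative expectations (derived from the checks above, with
// MERGABLE_SIZE = 1 MB; the "..." stands for the remaining Entity fields):
//   isMergable({ keyRaw: "a.md",    sizeRaw: 500_000, ... })   -> true
//   isMergable({ keyRaw: "a.pdf",   sizeRaw: 500_000, ... })   -> false (not markdown)
//   isMergable({ keyRaw: "big.md",  sizeRaw: 2_000_000, ... }) -> false (too large)
//   isMergable({ keyRaw: "folder/", sizeRaw: 0, ... })         -> false (a folder)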
/**
* slightly modified to fit the markdown context
* @param a
* @param o
* @param b
*/
function mergeDigInModified(a: string, o: string, b: string) {
const { conflict, result } = mergeDigIn(a, o, b);
for (let index = 0; index < result.length; ++index) {
if (["<<<<<<<", "=======", ">>>>>>>"].contains(result[index])) {
result[index] = "`" + result[index] + "`";
}
}
return {
conflict,
result,
};
}
function getLCSText(a: string, b: string) {
const aa = a.split("\n");
const bb = b.split("\n");
let raw = LCS(aa, bb);
const k: string[] = [];
do {
k.unshift(aa[raw.buffer1index]);
raw = raw.chain as any;
} while (raw !== null && raw !== undefined && raw.buffer1index !== -1);
return k.join("\n");
}
/**
* It's tricky: we compute the LCS, then pretend it's the original text
* @param a
* @param b
* @returns
*/
function twoWayMerge(a: string, b: string): string {
// const c = getLCSText(a, b);
// const patches = makePatches(c, a);
// const [d] = applyPatches(patches, b);
const c = getLCSText(a, b);
const d = mergeDigInModified(a, c, b).result.join("\n");
return d;
}
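// Hedged example of the LCS-as-origin trick: for
//   a = "x\ny\nz" and b = "x\nq\nz",
// the LCS "x\nz" acts as the pretend original, so both changed middle lines
// survive the dig-in merge (with any conflict markers wrapped in backticks by
// mergeDigInModified above). The exact output depends on node-diff3's
// heuristics.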
/**
* An ordinary three-way merge.
* @param a
* @param b
* @param orig
* @returns
*/
function threeWayMerge(a: string, b: string, orig: string) {
return mergeDigInModified(a, orig, b).result.join("\n");
}
export async function mergeFile(
key: string,
left: FakeFs,
right: FakeFs,
contentOrig: ArrayBuffer | null | undefined
) {
// console.debug(
// `mergeFile: key=${key}, left=${left.kind}, right=${right.kind}`
// );
if (key.endsWith("/")) {
throw Error(`should not call ${key} in mergeFile`);
}
if (!key.endsWith(".md") && !key.endsWith(".markdown")) {
throw Error(`currently only support markdown files in mergeFile`);
}
const [contentLeft, contentRight] = await Promise.all([
left.readFile(key),
right.readFile(key),
]);
let newArrayBuffer: ArrayBuffer | undefined = undefined;
const decoder = new TextDecoder("utf-8");
if (isEqual(contentLeft, contentRight)) {
// we are lucky enough
newArrayBuffer = contentLeft;
// TODO: save the write
} else {
if (contentOrig === null || contentOrig === undefined) {
const newText = twoWayMerge(
decoder.decode(contentLeft),
decoder.decode(contentRight)
);
// no need to worry about the offset here because the array is new and not sliced
newArrayBuffer = new TextEncoder().encode(newText).buffer;
} else {
const newText = threeWayMerge(
decoder.decode(contentLeft),
decoder.decode(contentRight),
decoder.decode(contentOrig)
);
newArrayBuffer = new TextEncoder().encode(newText).buffer;
}
}
const mtime = Date.now();
// left (local) must wait for the right (remote),
// because the mtime might be different after upload;
// so upload first
const rightEntity = await right.writeFile(key, newArrayBuffer, mtime, mtime);
// write local secondly
const leftEntity = await left.writeFile(
key,
newArrayBuffer,
rightEntity.mtimeCli ?? mtime,
rightEntity.mtimeCli ?? mtime
);
return {
entity: rightEntity,
content: newArrayBuffer,
};
}
export function getFileRename(key: string) {
if (
key === "" ||
key === "." ||
key === ".." ||
key === "/" ||
key.endsWith("/")
) {
throw Error(`we cannot rename key=${key}`);
}
const segsPath = key.split("/");
const name = segsPath[segsPath.length - 1];
const segsName = name.split(".");
if (segsName.length === 0) {
throw Error(`we cannot rename key=${key}`);
} else if (segsName.length === 1) {
// name = "kkk" without any dot
segsPath[segsPath.length - 1] = `${name}.dup`;
} else if (segsName.length === 2) {
if (segsName[0] === "") {
// name = ".kkkk" with leading dot
segsPath[segsPath.length - 1] = `${name}.dup`;
} else if (segsName[1] === "") {
// name = "kkkk." with tailing dot
segsPath[segsPath.length - 1] = `${segsName[0]}.dup`;
} else {
// name = "aaa.bbb" normally
segsPath[segsPath.length - 1] = `${segsName[0]}.dup.${segsName[1]}`;
}
} else {
// name = "[...].bbb.ccc"
const firstPart = segsName.slice(0, segsName.length - 1).join(".");
const thirdPart = segsName[segsName.length - 1];
segsPath[segsPath.length - 1] = `${firstPart}.dup.${thirdPart}`;
}
const res = segsPath.join("/");
return res;
}
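// Examples of the renaming rules above (derived from the branches):
//   "note"         -> "note.dup"
//   ".hidden"      -> ".hidden.dup"
//   "trailingdot." -> "trailingdot.dup"
//   "aaa.md"       -> "aaa.dup.md"
//   "a.b.md"       -> "a.b.dup.md"
//   "dir/aaa.md"   -> "dir/aaa.dup.md"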
/**
* local: x.md -> x.dup.md -> upload to remote
* remote: x.md -> download to local -> using original name x.md
*/
export async function duplicateFile(
key: string,
left: FakeFs,
right: FakeFs,
uploadCallback: (entity: Entity) => Promise<any>,
downloadCallback: (entity: Entity) => Promise<any>
) {
let key2 = getFileRename(key);
let usable = false;
do {
try {
const s = await left.stat(key2);
if (s === null || s === undefined) {
throw Error(`not exist ${key2}`);
}
console.debug(`key2=${key2} exists, cannot use for new file`);
key2 = getFileRename(key2);
console.debug(`key2=${key2} is prepared for next try`);
} catch (e) {
// not exists, exactly what we want
console.debug(`key2=${key2} doesn't exist, usable for new file`);
usable = true;
}
} while (!usable);
await left.rename(key, key2);
/**
* x.dup.md -> upload to remote
*/
async function f1() {
const k = await copyFile(key2, left, right);
await uploadCallback(k.entity);
return k.entity;
}
/**
* x.md -> download to local
*/
async function f2() {
const k = await copyFile(key, right, left);
await downloadCallback(k.entity);
return k.entity;
}
const [resUpload, resDownload] = await Promise.all([f1(), f2()]);
return {
upload: resUpload,
download: resDownload,
};
}
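// Net effect (per the doc comment above), sketched for duplicateFile("x.md", ...):
//   local:  "x.dup.md" (the old local content, also uploaded to remote)
//   local:  "x.md"     (freshly downloaded remote content)
//   remote: "x.md" and "x.dup.md"
// If "x.dup.md" already exists locally, the ".dup" step repeats
// ("x.dup.dup.md", ...) until a free name is found.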

pro/src/fsGoogleDrive.ts Normal file (+765)
@@ -0,0 +1,765 @@
// https://developers.google.com/identity/protocols/oauth2/native-app
// https://developers.google.com/identity/protocols/oauth2/javascript-implicit-flow
// https://developers.google.com/identity/protocols/oauth2/web-server
import { entries } from "lodash";
import * as mime from "mime-types";
import { requestUrl } from "obsidian";
import PQueue from "p-queue";
import { DEFAULT_CONTENT_TYPE, type Entity } from "../../src/baseTypes";
import { FakeFs } from "../../src/fsAll";
import {
getFolderLevels,
splitFileSizeToChunkRanges,
unixTimeToStr,
} from "../../src/misc";
import {
DEFAULT_GOOGLEDRIVE_CLIENT_ID,
DEFAULT_GOOGLEDRIVE_CLIENT_SECRET,
type GoogleDriveConfig,
} from "./baseTypesPro";
export const DEFAULT_GOOGLEDRIVE_CONFIG: GoogleDriveConfig = {
accessToken: "",
refreshToken: "",
accessTokenExpiresInMs: 0,
accessTokenExpiresAtTimeMs: 0,
credentialsShouldBeDeletedAtTimeMs: 0,
scope: "https://www.googleapis.com/auth/drive.file",
};
const FOLDER_MIME_TYPE = "application/vnd.google-apps.folder";
/**
* A simplified version of the Google Drive v3 `File` resource type.
*/
interface File {
kind?: string;
driveId?: string;
fileExtension?: string;
copyRequiresWriterPermission?: boolean;
md5Checksum?: string;
writersCanShare?: boolean;
viewedByMe?: boolean;
mimeType?: string;
parents?: string[];
thumbnailLink?: string;
iconLink?: string;
shared?: boolean;
headRevisionId?: string;
webViewLink?: string;
webContentLink?: string;
size?: string;
viewersCanCopyContent?: boolean;
hasThumbnail?: boolean;
spaces?: string[];
folderColorRgb?: string;
id?: string;
name?: string;
description?: string;
starred?: boolean;
trashed?: boolean;
explicitlyTrashed?: boolean;
createdTime?: string;
modifiedTime?: string;
modifiedByMeTime?: string;
viewedByMeTime?: string;
sharedWithMeTime?: string;
quotaBytesUsed?: string;
version?: string;
originalFilename?: string;
ownedByMe?: boolean;
fullFileExtension?: string;
isAppAuthorized?: boolean;
teamDriveId?: string;
hasAugmentedPermissions?: boolean;
thumbnailVersion?: string;
trashedTime?: string;
modifiedByMe?: boolean;
permissionIds?: string[];
resourceKey?: string;
sha1Checksum?: string;
sha256Checksum?: string;
}
interface GDEntity extends Entity {
id: string;
parentID: string | undefined;
parentIDPath: string | undefined;
isFolder: boolean;
}
/**
* https://developers.google.com/identity/protocols/oauth2/web-server#httprest_7
* @param refreshToken
*/
export const sendRefreshTokenReq = async (refreshToken: string) => {
console.debug(`refreshing token`);
const x = await fetch("https://oauth2.googleapis.com/token", {
method: "POST",
headers: {
"Content-Type": "application/x-www-form-urlencoded",
},
body: new URLSearchParams({
client_id: DEFAULT_GOOGLEDRIVE_CLIENT_ID ?? "",
client_secret: DEFAULT_GOOGLEDRIVE_CLIENT_SECRET ?? "",
grant_type: "refresh_token",
refresh_token: refreshToken,
}).toString(),
});
if (x.status === 200) {
const y = await x.json();
console.debug(`new token obtained`);
return y;
} else {
throw Error(`cannot refresh an access token`);
}
// {
// "access_token": "1/fFAGRNJru1FTz70BzhT3Zg",
// "expires_in": 3920,
// "scope": "https://www.googleapis.com/auth/drive.file",
// "token_type": "Bearer"
// }
};
const fromFileToGDEntity = (
file: File,
parentID: string,
parentFolderPath: string | undefined /* for bfs */
) => {
if (parentID === undefined || parentID === "" || parentID === "root") {
throw Error(`parentID=${parentID} should not be in fromFileToGDEntity`);
}
let keyRaw = file.name!;
if (
parentFolderPath !== undefined &&
parentFolderPath !== "" &&
parentFolderPath !== "/"
) {
if (!parentFolderPath.endsWith("/")) {
throw Error(
`parentFolderPath=${parentFolderPath} should not be in fromFileToGDEntity`
);
}
keyRaw = `${parentFolderPath}${file.name}`;
}
const isFolder = file.mimeType === FOLDER_MIME_TYPE;
if (isFolder) {
keyRaw = `${keyRaw}/`;
}
return {
key: keyRaw,
keyRaw: keyRaw,
mtimeCli: Date.parse(file.modifiedTime!),
mtimeSvr: Date.parse(file.modifiedTime!),
size: isFolder ? 0 : Number.parseInt(file.size!),
sizeRaw: isFolder ? 0 : Number.parseInt(file.size!),
hash: isFolder ? undefined : file.md5Checksum!,
id: file.id!,
parentID: parentID,
isFolder: isFolder,
} as GDEntity;
};
export class FakeFsGoogleDrive extends FakeFs {
kind: string;
googleDriveConfig: GoogleDriveConfig;
remoteBaseDir: string;
vaultFolderExists: boolean;
saveUpdatedConfigFunc: () => Promise<any>;
keyToGDEntity: Record<string, GDEntity>;
baseDirID: string;
constructor(
googleDriveConfig: GoogleDriveConfig,
vaultName: string,
saveUpdatedConfigFunc: () => Promise<any>
) {
super();
this.kind = "googledrive";
this.googleDriveConfig = googleDriveConfig;
this.remoteBaseDir =
this.googleDriveConfig.remoteBaseDir || vaultName || "";
this.vaultFolderExists = false;
this.saveUpdatedConfigFunc = saveUpdatedConfigFunc;
this.keyToGDEntity = {};
this.baseDirID = "";
}
async _init() {
// get accessToken
await this._getAccessToken();
// check vault folder exists
if (this.vaultFolderExists) {
// pass
} else {
const q = encodeURIComponent(
`name='${this.remoteBaseDir}' and mimeType='application/vnd.google-apps.folder' and trashed=false`
);
const url: string = `https://www.googleapis.com/drive/v3/files?q=${q}&pageSize=1000&fields=kind,nextPageToken,files(kind,fileExtension,md5Checksum,mimeType,parents,size,spaces,id,name,trashed,createdTime,modifiedTime,quotaBytesUsed,originalFilename,fullFileExtension,sha1Checksum,sha256Checksum)`;
const k = await fetch(url, {
method: "GET",
headers: {
Authorization: `Bearer ${await this._getAccessToken()}`,
},
});
const k1: { files: File[] } = await k.json();
// console.debug(k1);
if (k1.files.length > 0) {
// yeah we find it
this.baseDirID = k1.files[0].id!;
this.vaultFolderExists = true;
} else {
// wait, we need to create the folder!
console.debug(`we need to create the base dir ${this.remoteBaseDir}`);
const meta: any = {
mimeType: FOLDER_MIME_TYPE,
name: this.remoteBaseDir,
};
const res = await fetch("https://www.googleapis.com/drive/v3/files", {
method: "POST",
headers: {
Authorization: `Bearer ${await this._getAccessToken()}`,
"Content-Type": "application/json",
},
body: JSON.stringify(meta),
});
const res2: File = await res.json();
if (res.status === 200) {
console.debug(`succeed to create the base dir ${this.remoteBaseDir}`);
this.baseDirID = res2.id!;
this.vaultFolderExists = true;
} else {
throw Error(
`cannot create base dir ${this.remoteBaseDir} in init func.`
);
}
}
}
}
async _getAccessToken() {
if (
this.googleDriveConfig.accessToken === "" ||
this.googleDriveConfig.refreshToken === ""
) {
throw Error("The user has not manually auth yet.");
}
const ts = Date.now();
if (this.googleDriveConfig.accessTokenExpiresAtTimeMs > ts) {
return this.googleDriveConfig.accessToken;
}
// refresh
const k = await sendRefreshTokenReq(this.googleDriveConfig.refreshToken);
this.googleDriveConfig.accessToken = k.access_token;
this.googleDriveConfig.accessTokenExpiresInMs = k.expires_in * 1000;
this.googleDriveConfig.accessTokenExpiresAtTimeMs =
ts + k.expires_in * 1000 - 60 * 2 * 1000;
await this.saveUpdatedConfigFunc();
console.info("Google Drive accessToken updated");
return this.googleDriveConfig.accessToken;
}
/**
* https://developers.google.com/drive/api/reference/rest/v3/files/list
*/
async walk(): Promise<Entity[]> {
await this._init();
const allFiles: GDEntity[] = [];
// bfs
const queue = new PQueue({
concurrency: 5, // TODO: make it configurable?
autoStart: true,
});
queue.on("error", (error) => {
queue.pause();
queue.clear();
throw error;
});
let parents = [
{
id: this.baseDirID, // special init, from already created root folder ID
folderPath: "",
},
];
while (parents.length !== 0) {
const children: typeof parents = [];
for (const { id, folderPath } of parents) {
queue.add(async () => {
const filesUnderFolder = await this._walkFolder(id, folderPath);
for (const f of filesUnderFolder) {
allFiles.push(f);
if (f.isFolder) {
// keyRaw itself already has a tailing slash, no more slash here
// keyRaw itself also already has full path
const child = {
id: f.id,
folderPath: f.keyRaw,
};
// console.debug(
// `looping result of _walkFolder(${id},${folderPath}), adding child=${JSON.stringify(
// child
// )}`
// );
children.push(child);
}
}
});
}
await queue.onIdle();
parents = children;
}
// console.debug(`in the end of walk:`);
// console.debug(allFiles);
// console.debug(this.keyToGDEntity);
return allFiles;
}
async _walkFolder(parentID: string, parentFolderPath: string) {
// console.debug(
// `input of single level: parentID=${parentID}, parentFolderPath=${parentFolderPath}`
// );
const filesOneLevel: GDEntity[] = [];
let nextPageToken: string | undefined = undefined;
if (parentID === undefined || parentID === "" || parentID === "root") {
// we should never start from root
// because we encapsulate the vault inside a folder
throw Error(`something went wrong walking the folder`);
}
do {
const q = encodeURIComponent(
`'${parentID}' in parents and trashed=false`
);
const pageToken =
nextPageToken !== undefined ? `&pageToken=${nextPageToken}` : "";
const url: string = `https://www.googleapis.com/drive/v3/files?q=${q}&pageSize=1000&fields=kind,nextPageToken,files(kind,fileExtension,md5Checksum,mimeType,parents,size,spaces,id,name,trashed,createdTime,modifiedTime,quotaBytesUsed,originalFilename,fullFileExtension,sha1Checksum,sha256Checksum)${pageToken}`;
const k = await fetch(url, {
method: "GET",
headers: {
Authorization: `Bearer ${await this._getAccessToken()}`,
},
});
if (k.status !== 200) {
throw Error(`cannot walk for parentID=${parentID}`);
}
const k1 = await k.json();
// console.debug(k1);
for (const file of k1.files as File[]) {
const entity = fromFileToGDEntity(file, parentID, parentFolderPath);
this.keyToGDEntity[entity.keyRaw] = entity; // build cache
filesOneLevel.push(entity);
}
nextPageToken = k1.nextPageToken;
} while (nextPageToken !== undefined);
// console.debug(filesOneLevel);
return filesOneLevel;
}
async walkPartial(): Promise<Entity[]> {
await this._init();
const filesInLevel = await this._walkFolder(this.baseDirID, "");
return filesInLevel;
}
/**
* https://developers.google.com/drive/api/reference/rest/v3/files/get
* https://developers.google.com/drive/api/guides/fields-parameter
*/
async stat(key: string): Promise<Entity> {
await this._init();
// TODO: we already have a cache, should we call again?
const cachedEntity = this.keyToGDEntity[key];
const fileID = cachedEntity?.id;
if (cachedEntity === undefined || fileID === undefined) {
throw Error(`no fileID found for key=${key}`);
}
const url: string = `https://www.googleapis.com/drive/v3/files/${fileID}?fields=kind,fileExtension,md5Checksum,mimeType,parents,size,spaces,id,name,trashed,createdTime,modifiedTime,quotaBytesUsed,originalFilename,fullFileExtension,sha1Checksum,sha256Checksum`;
const k = await fetch(url, {
method: "GET",
headers: {
Authorization: `Bearer ${await this._getAccessToken()}`,
},
});
if (k.status !== 200) {
throw Error(`cannot get file meta fileID=${fileID}, key=${key}`);
}
const k1: File = await k.json();
const entity = fromFileToGDEntity(
k1,
cachedEntity.parentID!,
cachedEntity.parentIDPath!
);
// insert back to cache?? to update it??
this.keyToGDEntity[key] = entity;
return entity;
}
/**
* https://developers.google.com/drive/api/guides/folder
*/
async mkdir(
key: string,
mtime: number | undefined,
ctime: number | undefined
): Promise<Entity> {
if (!key.endsWith("/")) {
throw Error(`you should not mkdir on key=${key}`);
}
await this._init();
// xxx/ => ["xxx"]
// xxx/yyy/zzz/ => ["xxx", "xxx/yyy", "xxx/yyy/zzz"]
const folderLevels = getFolderLevels(key);
let parentFolderPath: string | undefined = undefined;
let parentID: string | undefined = undefined;
if (folderLevels.length === 0) {
throw Error(`cannot getFolderLevels of ${key}`);
} else if (folderLevels.length === 1) {
parentID = this.baseDirID;
parentFolderPath = ""; // ignore base dir
} else {
// length > 1
parentFolderPath = `${folderLevels[folderLevels.length - 2]}/`;
if (!(parentFolderPath in this.keyToGDEntity)) {
throw Error(
`parent of ${key}: ${parentFolderPath} is not created before??`
);
}
parentID = this.keyToGDEntity[parentFolderPath].id;
}
// xxx/yyy/zzz/ => ["xxx", "xxx/yyy", "xxx/yyy/zzz"] => "xxx/yyy/zzz" => "zzz"
let folderItselfWithoutSlash = folderLevels[folderLevels.length - 1];
folderItselfWithoutSlash = folderItselfWithoutSlash.split("/").pop()!;
const meta: any = {
mimeType: FOLDER_MIME_TYPE,
modifiedTime: unixTimeToStr(mtime, true),
createdTime: unixTimeToStr(ctime, true),
name: folderItselfWithoutSlash,
parents: [parentID],
};
const res = await fetch("https://www.googleapis.com/drive/v3/files", {
method: "POST",
headers: {
Authorization: `Bearer ${await this._getAccessToken()}`,
"Content-Type": "application/json",
},
body: JSON.stringify(meta),
});
if (res.status !== 200 && res.status !== 201) {
throw Error(`create folder ${key} failed! meta=${JSON.stringify(meta)}`);
}
const res2: File = await res.json();
// console.debug(res2);
const entity = fromFileToGDEntity(res2, parentID, parentFolderPath);
// insert into cache
this.keyToGDEntity[key] = entity;
return entity;
}
/**
* https://developers.google.com/drive/api/guides/manage-uploads
* https://stackoverflow.com/questions/65181932/how-i-can-upload-file-to-google-drive-with-google-drive-api
*/
async writeFile(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity> {
if (key.endsWith("/")) {
throw Error(`should not call writeFile on ${key}`);
}
await this._init();
const contentType =
mime.contentType(mime.lookup(key) || DEFAULT_CONTENT_TYPE) ||
DEFAULT_CONTENT_TYPE;
let parentID: string | undefined = undefined;
let parentFolderPath: string | undefined = undefined;
// "xxx" => []
// "xxx/yyy/zzz.md" => ["xxx", "xxx/yyy"]
const folderLevels = getFolderLevels(key);
if (folderLevels.length === 0) {
// root
parentID = this.baseDirID;
parentFolderPath = "";
} else {
parentFolderPath = `${folderLevels[folderLevels.length - 1]}/`;
if (!(parentFolderPath in this.keyToGDEntity)) {
throw Error(
`parent of ${key}: ${parentFolderPath} is not created before??`
);
}
parentID = this.keyToGDEntity[parentFolderPath].id;
}
const fileItself = key.split("/").pop()!;
if (content.byteLength <= 5 * 1024 * 1024) {
const formData = new FormData();
const meta: any = {
name: fileItself,
modifiedTime: unixTimeToStr(mtime, true),
createdTime: unixTimeToStr(ctime, true),
parents: [parentID],
};
formData.append(
"metadata",
new Blob([JSON.stringify(meta)], {
type: "application/json; charset=UTF-8",
})
);
formData.append("media", new Blob([content], { type: contentType }));
const res = await fetch(
"https://www.googleapis.com/upload/drive/v3/files?uploadType=multipart&fields=kind,fileExtension,md5Checksum,mimeType,parents,size,spaces,id,name,trashed,createdTime,modifiedTime,quotaBytesUsed,originalFilename,fullFileExtension,sha1Checksum,sha256Checksum",
{
method: "POST",
headers: {
Authorization: `Bearer ${await this._getAccessToken()}`,
},
body: formData,
}
);
if (res.status !== 200 && res.status !== 201) {
throw Error(`create file ${key} failed! meta=${JSON.stringify(meta)}`);
}
const res2: File = await res.json();
console.debug(
`upload ${key} with ${JSON.stringify(meta)}, res2=${JSON.stringify(
res2
)}`
);
const entity = fromFileToGDEntity(res2, parentID, parentFolderPath);
// insert into cache
this.keyToGDEntity[key] = entity;
return entity;
} else {
const meta: any = {
name: fileItself,
modifiedTime: unixTimeToStr(mtime, true),
createdTime: unixTimeToStr(ctime, true),
parents: [parentID],
};
const bodyStr = JSON.stringify(meta);
const headers: HeadersInit = {
Authorization: `Bearer ${await this._getAccessToken()}`,
"Content-Type": "application/json",
"Content-Length": `${bodyStr.length}`,
"X-Upload-Content-Type": contentType,
"X-Upload-Content-Length": `${content.byteLength}`,
};
const res = await fetch(
"https://www.googleapis.com/upload/drive/v3/files?uploadType=resumable&fields=kind,fileExtension,md5Checksum,mimeType,parents,size,spaces,id,name,trashed,createdTime,modifiedTime,quotaBytesUsed,originalFilename,fullFileExtension,sha1Checksum,sha256Checksum",
{
method: "POST",
headers: headers,
body: bodyStr,
}
);
if (res.status !== 200) {
throw Error(
`create resumable file ${key} failed! meta=${JSON.stringify(
meta
)}, header=${JSON.stringify(headers)}`
);
}
const uploadLocation = res.headers.get("Location");
if (uploadLocation === null || !uploadLocation.startsWith("http")) {
throw Error(
`create resumable file ${key} failed! meta=${JSON.stringify(
meta
)}, header=${JSON.stringify(headers)}`
);
}
console.debug(`key=${key}, uploadLocation=${uploadLocation}`);
// multiples of 256 KB (256 x 1024 bytes) in size
const sizePerChunk = 5 * 4 * 256 * 1024; // 5,242,880 bytes ≈ 5.24 MB, a multiple of 256 KiB
const chunkRanges = splitFileSizeToChunkRanges(
content.byteLength,
sizePerChunk
);
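// Hedged illustration (assuming splitFileSizeToChunkRanges yields inclusive
// {start, end} byte ranges, which is how the loop below consumes them): a
// 12 MiB file (12,582,912 bytes) with sizePerChunk = 5,242,880 would
// presumably produce
//   { start: 0,        end: 5242879  }
//   { start: 5242880,  end: 10485759 }
//   { start: 10485760, end: 12582911 }
// i.e. two full chunks plus a final partial one.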
let entity: GDEntity | undefined = undefined;
// TODO: deal with "Resume an interrupted upload"
// currently (202405) only assume everything goes well...
// TODO: parallel
for (const { start, end } of chunkRanges) {
console.debug(
`key=${key}, start upload chunk ${start}-${end}/${content.byteLength}`
);
const res = await fetch(uploadLocation, {
method: "PUT",
headers: {
Authorization: `Bearer ${await this._getAccessToken()}`,
"Content-Length": `${end - start + 1}`, // the number of bytes in the current chunk
"Content-Range": `bytes ${start}-${end}/${content.byteLength}`,
},
body: content.slice(start, end + 1), // TODO: slice() is a copy, may be we can optimize it
});
if (res.status >= 400 && res.status <= 599) {
throw Error(
`create resumable file ${key} failed! meta=${JSON.stringify(
meta
)}, header=${JSON.stringify(headers)}`
);
}
if (res.status === 200 || res.status === 201) {
const res2: File = await res.json();
console.debug(
`upload ${key} with ${JSON.stringify(meta)}, res2=${JSON.stringify(
res2
)}`
);
if (res2.id === undefined || res2.id === null || res2.id === "") {
// TODO: what's this??
} else {
entity = fromFileToGDEntity(res2, parentID, parentFolderPath);
// insert into cache
this.keyToGDEntity[key] = entity;
}
}
}
if (entity === undefined) {
throw Error(`something went wrong while uploading large file ${key}`);
}
return entity;
}
}
/**
* https://developers.google.com/drive/api/reference/rest/v3/files/get
*/
async readFile(key: string): Promise<ArrayBuffer> {
if (key.endsWith("/")) {
throw Error(`you should not call readFile on ${key}`);
}
await this._init();
const fileID = this.keyToGDEntity[key]?.id;
if (fileID === undefined) {
throw Error(`no fileID found for key=${key}`);
}
const res1 = await fetch(
`https://www.googleapis.com/drive/v3/files/${fileID}?alt=media`,
{
method: "GET",
headers: {
Authorization: `Bearer ${await this._getAccessToken()}`,
},
}
);
if (res1.status !== 200) {
throw Error(`cannot download ${key} using fileID=${fileID}`);
}
const res2 = await res1.arrayBuffer();
return res2;
}
async rename(key1: string, key2: string): Promise<void> {
throw new Error("Method not implemented.");
}
/**
* https://developers.google.com/drive/api/guides/delete
* https://developers.google.com/drive/api/reference/rest/v3/files/update
*/
async rm(key: string): Promise<void> {
await this._init();
const fileID = this.keyToGDEntity[key]?.id;
if (fileID === undefined) {
throw Error(`no fileID found for key=${key}`);
}
const res1 = await fetch(
`https://www.googleapis.com/drive/v3/files/${fileID}`,
{
method: "PATCH",
headers: {
Authorization: `Bearer ${await this._getAccessToken()}`,
},
body: JSON.stringify({
trashed: true,
}),
}
);
if (res1.status !== 200) {
throw Error(`cannot rm ${key} using fileID=${fileID}`);
}
}
async checkConnect(callbackFunc?: any): Promise<boolean> {
// if we can init, we can connect
try {
await this._init();
return true;
} catch (err) {
console.debug(err);
callbackFunc?.(err);
return false;
}
}
async getUserDisplayName(): Promise<string> {
throw new Error("Method not implemented.");
}
/**
* https://developers.google.com/identity/protocols/oauth2/web-server#tokenrevoke
*/
async revokeAuth(): Promise<any> {
const x = await fetch(
`https://oauth2.googleapis.com/revoke?token=${await this._getAccessToken()}`,
{
method: "POST",
headers: {
"Content-Type": "application/x-www-form-urlencoded",
},
}
);
if (x.status === 200) {
return true;
} else {
throw Error(`cannot revoke`);
}
}
allowEmptyFile(): boolean {
return true;
}
}

pro/src/langs/en.json Normal file (+75)
@@ -0,0 +1,75 @@
{
"settings_conflictaction_smart_conflict": "Smart Conflict (PRO) (beta)",
"settings_conflictaction_smart_conflict_desc": "<p><strong>!!It's a PRO feature! You need an online account for this feature!!</strong>(<a href=\"#settings-pro\">scroll down</a> for more info about PRO account.)</p><p><ul><li>For small markdown files, the plugin tries to merge them with diff3 algorithm.</li><li>For large files or not-markdown files, the plugin saves both files by renaming them.</li></ul></p><p><strong>Please manually backup your vaule before using this feature!</strong></p>",
"protocol_pro_connecting": "Connectting",
"protocol_pro_connect_manualinput_succ": "You've connected",
"protocol_pro_connect_fail": "Something went wrong from response from Remotely Save official website. Maybe the network connection is not good. Maybe you rejected the auth?",
"protocol_pro_connect_succ_revoke": "You've connected as user {{email}}. If you want to disconnect, click this button.",
"modal_googledriveauth_tutorial": "<p>Please firstly go to the address, then go on the auth flow. In the end, you will see a code, please paste that code here and submit.</p>",
"modal_googledriveauth_copybutton": "Click to copy the auth url",
"modal_googledriveauth_copynotice": "The auth url is copied to the clipboard!",
"modal_googledrivce_maualinput": "The Code from the website",
"modal_googledrivce_maualinput_desc": "Please input the code here from the end of auth flow, and press confirm.",
"modal_googledrive_maualinput_notice": "We are trying to connect to Google and update the credentials...",
"modal_googledrive_maualinput_succ_notice": "Great! The credentials are updated!",
"modal_googledrive_maualinput_fail_notice": "Oops! Failed to update the credentials. Please try again later.",
"modal_googledriverevokeauth_step1": "Step 1: Go to the following address, you can remove the connection there.",
"modal_googledriverevokeauth_step2": "Step 2: Click the button below, to clean the locally-saved login credentials.",
"modal_googledriverevokeauth_clean": "Clean Locally-Saved Login Credentials",
"modal_googledriverevokeauth_clean_desc": "You need to click the button.",
"modal_googledriverevokeauth_clean_button": "Clean",
"modal_googledriverevokeauth_clean_notice": "Cleaned!",
"modal_googledriverevokeauth_clean_fail": "Something goes wrong while revoking.",
"modal_prorevokeauth": "Revoke auth by clicking here and follow the steps.",
"modal_prorevokeauth_clean": "Clean",
"modal_prorevokeauth_clean_desc": "Clean local auth record",
"modal_prorevokeauth_clean_button": "Clean",
"modal_prorevokeauth_clean_notice": "Local auth record is cleaned",
"modal_prorevokeauth_clean_fail": "Fail to clean local auth record.",
"modal_proauth_copybutton": "Click to copy the auth url",
"modal_proauth_copynotice": "The auth url is copied to the clipboard!",
"modal_proauth_maualinput": "The Code from the website",
"modal_proauth_maualinput_desc": "Please input the code here from the end of auth flow, and press confirm.",
"modal_proauth_maualinput_notice": "Trying to connect, wait...",
"modal_proauth_maualinput_conn_fail": "Failed to connect",
"settings_googledrive": "Google Drive (PRO) (beta)",
"settings_chooseservice_googledrive": "Google Drive (PRO) (beta)",
"settings_googledrive_disclaimer1": "Disclaimer: This app is NOT an official Google product. The app just uses Google Drive's public api.",
"settings_googledrive_disclaimer2": "Disclaimer: The information is stored locally. Other malicious/harmful/faulty plugins could read the info. If you see any unintentional access to your Google Drive, please immediately disconnect this app on https://myaccount.google.com/permissions .",
"settings_googledrive_pro_desc": "<p><strong>!!It's a PRO feature of Remotely Save! You need a Remotely Save online account for this feature!!</strong>(<a href=\"#settings-pro\">scroll down</a> for more info about PRO account.)</p>",
"settings_googledrive_notshowuphint": "Google Drive Settings Not Available",
"settings_googledrive_notshowuphint_desc": "Google Drive settings are not available, because you haven't subscribed to the PRO feature in your Remotely Save account.",
"settings_googledrive_notshowuphint_view_pro": "View PRO Settings",
"settings_googledrive_folder": "We will create and sync inside the folder {{remoteBaseDir}} on your Google Drive. DO NOT create this folder by yourself manually.",
"settings_googledrive_revoke": "Revoke Auth",
"settings_googledrive_revoke_desc": "You've connected. If you want to disconnect, click this button.",
"settings_googledrive_revoke_button": "Revoke Auth",
"settings_googledrive_auth": "Auth",
"settings_googledrive_auth_desc": "Auth.",
"settings_googledrive_auth_button": "Auth",
"settings_googledrive_connect_succ": "Great! We can connect to Google Drive!",
"settings_googledrive_connect_fail": "We cannot connect to Google Drive.",
"settings_export_googledrive_button": "Export Google Drive Part",
"settings_pro": "Account (for PRO features)",
"settings_pro_tutorial": "<p>Using <stong>basic</strong> features of Remotely Save is <strong>FREE</strong> and do <strong>NOT</strong> need an account.</p><p>However, you will <strong>need</strong> an online account and <strong>PAY</strong> for the <strong>PRO</strong> features such as smart conflict.</p><p>Firstly please click the button to sign up and sign in to the website: <a href=\"https://remotelysave.com\">https://remotelysave.com</a>. Notice: It's different from, and NOT affiliated with Obsidian account.</p><p>Secondly please \"connect\" your local device to your online account.",
"settings_pro_features": "Features",
"settings_pro_features_desc": "Here are features you've enabled:<br/>{{{features}}}",
"settings_pro_features_refresh_button": "Check again",
"settings_pro_features_refresh_fetch": "Fetching...",
"settings_pro_features_refresh_succ": "Refreshed!",
"settings_pro_revoke": "Disconnect",
"settings_pro_revoke_desc": "You've connected as user {{email}}. If you want to disconnect, click this button.",
"settings_pro_revoke_button": "Disconnect",
"settings_pro_intro": "Remotely Save Online Account",
"settings_pro_intro_desc": "Click the button to jump to the website to sign up or sign in.",
"settings_pro_intro_button": "Sign Up / Sign In",
"settings_pro_auth": "Connect",
"settings_pro_auth_desc": "After you sign up and sign in the account on the website, you need to connect your plugin here to the online account. Please click the button to connect.",
"settings_pro_auth_button": "Connect"
}

pro/src/langs/index.ts Normal file (+9)
@@ -0,0 +1,9 @@
import en from "./en.json";
import zh_cn from "./zh_cn.json";
import zh_tw from "./zh_tw.json";
export const LANGS = {
en: en,
zh_cn: zh_cn,
zh_tw: zh_tw,
};

pro/src/langs/zh_cn.json Normal file (+75)
@@ -0,0 +1,75 @@
{
"settings_conflictaction_smart_conflict": "智能处理冲突 (PRO) (beta)",
"settings_conflictaction_smart_conflict_desc": "<p><strong>!!这是 PRO付费功能! 您需要在线账号来使用此功能!!</strong><a href=\"#settings-pro\">向下滑</a>可以看到 PRO 账号的更多信息。)</p><p><ul><li>小 markdown 文件,本插件尝试使用 diff3 算法合并它;</li><li>对于大文件或非 markdown 文件,本插件尝试改名字并均进行保存。</li></ul></p><p><strong>请注意先手动备份 vault 文件再用此功能!</strong></p>",
"protocol_pro_connecting": "正在连接",
"protocol_pro_connect_manualinput_succ": "连接成功",
"protocol_pro_connect_fail": "Remotely Save 官网返回错误。可能是网络连接不稳定。也可能是您拒绝了授权?",
"protocol_pro_connect_succ_revoke": "您已连接上账号 {{email}}。如果要取消连接,请点击此按钮。",
"modal_googledriveauth_tutorial": "<p>请访问此网址,然后会进入授权流程。最后,您会看到一个码,请复制粘贴到这里然后提交。</p>",
"modal_googledriveauth_copybutton": "点击以复制网址",
"modal_googledriveauth_copynotice": "网址已复制!",
"modal_googledrivce_maualinput": "网站上的码",
"modal_googledrivce_maualinput_desc": "请粘贴授权流程最后的那个码,然后点击确认。",
"modal_googledrive_maualinput_notice": "正在尝试连接 Google 并更新授权信息......",
"modal_googledrive_maualinput_succ_notice": "很好!授权信息已更新!",
"modal_googledrive_maualinput_fail_notice": "更新授权信息失败。请稍后重试。",
"modal_googledriverevokeauth_step1": "第 1 步:访问以下网址,可以删除连接。",
"modal_googledriverevokeauth_step2": "第 2 步:点击以下按钮,从而清理本地的登录信息。",
"modal_googledriverevokeauth_clean": "清理本地登录信息",
"modal_googledriverevokeauth_clean_desc": "您需要点击此按钮。",
"modal_googledriverevokeauth_clean_button": "清理",
"modal_googledriverevokeauth_clean_notice": "已清理!",
"modal_googledriverevokeauth_clean_fail": "清理授权时候发生了错误。",
"modal_prorevokeauth": "点击这里和按照步骤取消授权。",
"modal_prorevokeauth_clean": "清理",
"modal_prorevokeauth_clean_desc": "清理本地授权记录",
"modal_prorevokeauth_clean_button": "清理",
"modal_prorevokeauth_clean_notice": "清理本地授权记录完毕",
"modal_prorevokeauth_clean_fail": "清理本地授权记录粗错。",
"modal_proauth_copybutton": "点击从而复制授权网址",
"modal_proauth_copynotice": "授权网址已复制!",
"modal_proauth_maualinput": "网站的授权码",
"modal_proauth_maualinput_desc": "请输入授权流程最后一步的授权码,然后点击确认。",
"modal_proauth_maualinput_notice": "正在连接,请稍候......",
"modal_proauth_maualinput_conn_fail": "连接失败",
"settings_googledrive": "Google Drive (PRO) (beta)",
"settings_chooseservice_googledrive": "Google Drive (PRO) (beta)",
"settings_googledrive_disclaimer1": "声明:本插件不是 Google 的官方产品。只是用到了它的公开 API。",
"settings_googledrive_disclaimer2": "声明:您所输入的信息存储于本地。其它有害的或者出错的插件,是有可能读取到这些信息的。如果您发现任何不符合预期的 Google Drive 访问,请立刻在以下网站操作断开连接: https://myaccount.google.com/permissions 。",
"settings_googledrive_pro_desc": "<p><strong>!!这是 PRO付费功能! 您需要在线账号来使用此功能!!</strong><a href=\"#settings-pro\">向下滑</a>可以看到 PRO 账号的更多信息。)</p>",
"settings_googledrive_notshowuphint": "Google Drive 设置不可用",
"settings_googledrive_notshowuphint_desc": "Google Drive 设置不可用,因为您没有在 Remotely Save 账号里开启这个 PRO 功能。",
"settings_googledrive_notshowuphint_view_pro": "查看 PRO 相关设置",
"settings_googledrive_folder": "我们会在 Google Drive 创建此文件夹并同步内容进去: {{remoteBaseDir}} 。请不要手动在网站上创建。",
"settings_googledrive_revoke": "撤回鉴权",
"settings_googledrive_revoke_desc": "您现在已连接。如果想取消连接,请点击此按钮。",
"settings_googledrive_revoke_button": "撤回鉴权",
"settings_googledrive_auth": "鉴权",
"settings_googledrive_auth_desc": "鉴权.",
"settings_googledrive_auth_button": "鉴权",
"settings_googledrive_connect_succ": "很好!我们可连接上 Google Drive",
"settings_googledrive_connect_fail": "我们未能连接上 Google Drive。",
"settings_export_googledrive_button": "导出 Google Drive 部分",
"settings_pro": "账号PRO 付费功能)",
"settings_pro_tutorial": "<p>使用 Remotely Save 的<stong>基本</strong>功能是<strong>免费的</strong>,而且<strong>不</strong>需要注册对应账号。</p><p>但是,您<strong>需要</strong>注册账号和对<strong>PRO</strong>功能<strong>付费</strong>使用,如智能处理冲突功能。</p><p>第一步:点击按钮从而注册和登录网站:<a href=\"https://remotelysave.com\">https://remotelysave.com</a>。注意:这和 Obsidian 官方账号无关,是不同的账号。</p><p>第二部:点击“连接”按钮,从而连接本设备和在线账号。",
"settings_pro_features": "功能",
"settings_pro_features_desc": "您开通了以下功能:<br/>{{{features}}}",
"settings_pro_features_refresh_button": "再次检查",
"settings_pro_features_refresh_fetch": "正在获取数据......",
"settings_pro_features_refresh_succ": "已刷新!",
"settings_pro_revoke": "断开连接",
"settings_pro_revoke_desc": "您已连接上账号 {{email}}。如果要取消连接,请点击此按钮。",
"settings_pro_revoke_button": "断开连接",
"settings_pro_intro": "Remotely Save 账号",
"settings_pro_intro_desc": "点击此按钮,从而到网站上注册和登录。",
"settings_pro_intro_button": "注册或登录",
"settings_pro_auth": "连接",
"settings_pro_auth_desc": "在网站上注册和登录后,您需要“连接”本设备和在线账号。请点击按钮开始连接。",
"settings_pro_auth_button": "连接"
}

pro/src/langs/zh_tw.json Normal file (+75)
@@ -0,0 +1,75 @@
{
"settings_conflictaction_smart_conflict": "智慧處理衝突 (PRO) (beta)",
"settings_conflictaction_smart_conflict_desc": "<p><strong>!!這是 PRO付費功能! 您需要線上賬號來使用此功能!!</strong><a href=\"#settings-pro\">向下滑</a>可以看到 PRO 賬號的更多資訊。)</p><p><ul><li>小 markdown 檔案,本外掛嘗試使用 diff3 演算法合併它;</li><li>對於大檔案或非 markdown 檔案,本外掛嘗試改名字並均進行儲存。</li></ul></p><p><strong>請注意先手動備份 vault 檔案再用此功能!</strong></p>",
"protocol_pro_connecting": "正在連線",
"protocol_pro_connect_manualinput_succ": "連線成功",
"protocol_pro_connect_fail": "Remotely Save 官網返回錯誤。可能是網路連線不穩定。也可能是您拒絕了授權?",
"protocol_pro_connect_succ_revoke": "您已連線上賬號 {{email}}。如果要取消連線,請點選此按鈕。",
"modal_googledriveauth_tutorial": "<p>請訪問此網址,然後會進入授權流程。最後,您會看到一個碼,請複製貼上到這裡然後提交。</p>",
"modal_googledriveauth_copybutton": "點選以複製網址",
"modal_googledriveauth_copynotice": "網址已複製!",
"modal_googledrivce_maualinput": "網站上的碼",
"modal_googledrivce_maualinput_desc": "請貼上授權流程最後的那個碼,然後點選確認。",
"modal_googledrive_maualinput_notice": "正在嘗試連線 Google 並更新授權資訊......",
"modal_googledrive_maualinput_succ_notice": "很好!授權資訊已更新!",
"modal_googledrive_maualinput_fail_notice": "更新授權資訊失敗。請稍後重試。",
"modal_googledriverevokeauth_step1": "第 1 步:訪問以下網址,可以刪除連線。",
"modal_googledriverevokeauth_step2": "第 2 步:點選以下按鈕,從而清理本地的登入資訊。",
"modal_googledriverevokeauth_clean": "清理本地登入資訊",
"modal_googledriverevokeauth_clean_desc": "您需要點選此按鈕。",
"modal_googledriverevokeauth_clean_button": "清理",
"modal_googledriverevokeauth_clean_notice": "已清理!",
"modal_googledriverevokeauth_clean_fail": "清理授權時候發生了錯誤。",
"modal_prorevokeauth": "點選這裡和按照步驟取消授權。",
"modal_prorevokeauth_clean": "清理",
"modal_prorevokeauth_clean_desc": "清理本地授權記錄",
"modal_prorevokeauth_clean_button": "清理",
"modal_prorevokeauth_clean_notice": "清理本地授權記錄完畢",
"modal_prorevokeauth_clean_fail": "清理本地授權記錄粗錯。",
"modal_proauth_copybutton": "點選從而複製授權網址",
"modal_proauth_copynotice": "授權網址已複製!",
"modal_proauth_maualinput": "網站的授權碼",
"modal_proauth_maualinput_desc": "請輸入授權流程最後一步的授權碼,然後點選確認。",
"modal_proauth_maualinput_notice": "正在連線,請稍候......",
"modal_proauth_maualinput_conn_fail": "連線失敗",
"settings_googledrive": "Google Drive (PRO) (beta)",
"settings_chooseservice_googledrive": "Google Drive (PRO) (beta)",
"settings_googledrive_disclaimer1": "宣告:本外掛不是 Google 的官方產品。只是用到了它的公開 API。",
"settings_googledrive_disclaimer2": "宣告:您所輸入的資訊儲存於本地。其它有害的或者出錯的外掛,是有可能讀取到這些資訊的。如果您發現任何不符合預期的 Google Drive 訪問,請立刻在以下網站操作斷開連線: https://myaccount.google.com/permissions 。",
"settings_googledrive_pro_desc": "<p><strong>!!這是 PRO付費功能! 您需要線上賬號來使用此功能!!</strong><a href=\"#settings-pro\">向下滑</a>可以看到 PRO 賬號的更多資訊。)</p>",
"settings_googledrive_notshowuphint": "Google Drive 設定不可用",
"settings_googledrive_notshowuphint_desc": "Google Drive 設定不可用,因為您沒有在 Remotely Save 賬號裡開啟這個 PRO 功能。",
"settings_googledrive_notshowuphint_view_pro": "檢視 PRO 相關設定",
"settings_googledrive_folder": "我們會在 Google Drive 建立此資料夾並同步內容進去: {{remoteBaseDir}} 。請不要手動在網站上建立。",
"settings_googledrive_revoke": "撤回鑑權",
"settings_googledrive_revoke_desc": "您現在已連線。如果想取消連線,請點選此按鈕。",
"settings_googledrive_revoke_button": "撤回鑑權",
"settings_googledrive_auth": "鑑權",
"settings_googledrive_auth_desc": "鑑權.",
"settings_googledrive_auth_button": "鑑權",
"settings_googledrive_connect_succ": "很好!我們可連線上 Google Drive",
"settings_googledrive_connect_fail": "我們未能連線上 Google Drive。",
"settings_export_googledrive_button": "匯出 Google Drive 部分",
"settings_pro": "賬號PRO 付費功能)",
"settings_pro_tutorial": "<p>使用 Remotely Save 的<stong>基本</strong>功能是<strong>免費的</strong>,而且<strong>不</strong>需要註冊對應賬號。</p><p>但是,您<strong>需要</strong>註冊賬號和對<strong>PRO</strong>功能<strong>付費</strong>使用,如智慧處理衝突功能。</p><p>第一步:點選按鈕從而註冊和登入網站:<a href=\"https://remotelysave.com\">https://remotelysave.com</a>。注意:這和 Obsidian 官方賬號無關,是不同的賬號。</p><p>第二部:點選“連線”按鈕,從而連線本裝置和線上賬號。",
"settings_pro_features": "功能",
"settings_pro_features_desc": "您開通了以下功能:<br/>{{{features}}}",
"settings_pro_features_refresh_button": "再次檢查",
"settings_pro_features_refresh_fetch": "正在獲取資料......",
"settings_pro_features_refresh_succ": "已重新整理!",
"settings_pro_revoke": "斷開連線",
"settings_pro_revoke_desc": "您已連線上賬號 {{email}}。如果要取消連線,請點選此按鈕。",
"settings_pro_revoke_button": "斷開連線",
"settings_pro_intro": "Remotely Save 賬號",
"settings_pro_intro_desc": "點選此按鈕,從而到網站上註冊和登入。",
"settings_pro_intro_button": "註冊或登入",
"settings_pro_auth": "連線",
"settings_pro_auth_desc": "在網站上註冊和登入後,您需要“連線”本裝置和線上賬號。請點選按鈕開始連線。",
"settings_pro_auth_button": "連線"
}

pro/src/localdb.ts Normal file (+47)
@@ -0,0 +1,47 @@
import type { Entity } from "../../src/baseTypes";
import type { InternalDBs } from "../../src/localdb";
export const upsertFileContentHistoryByVaultAndProfile = async (
db: InternalDBs,
vaultRandomID: string,
profileID: string,
prevSync: Entity,
prevContent: ArrayBuffer
) => {
await db.fileContentHistoryTbl.setItem(
`${vaultRandomID}\t${profileID}\t${prevSync.key}`,
prevContent
);
};
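// The localforage keys are tab-separated triples; e.g. (hypothetical values)
//   "vaultRandom123\tprofileA\tnotes/todo.md"
// so clearAllFileContentHistoryByVault below can select a vault's records by
// the `${vaultRandomID}\t` prefix alone.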
export const getFileContentHistoryByVaultAndProfile = async (
db: InternalDBs,
vaultRandomID: string,
profileID: string,
prevSync: Entity
) => {
return (await db.fileContentHistoryTbl.getItem(
`${vaultRandomID}\t${profileID}\t${prevSync.key}`
)) as ArrayBuffer | null | undefined;
};
export const clearFileContentHistoryByVaultAndProfile = async (
db: InternalDBs,
vaultRandomID: string,
profileID: string,
key: string
) => {
await db.fileContentHistoryTbl.removeItem(
`${vaultRandomID}\t${profileID}\t${key}`
);
};
export const clearAllFileContentHistoryByVault = async (
db: InternalDBs,
vaultRandomID: string
) => {
const keys = (await db.fileContentHistoryTbl.keys()).filter((x) =>
x.startsWith(`${vaultRandomID}\t`)
);
await db.fileContentHistoryTbl.removeItems(keys);
};

@@ -0,0 +1,377 @@
import cloneDeep from "lodash/cloneDeep";
import { type App, Modal, Notice, Setting } from "obsidian";
import { getClient } from "../../src/fsGetter";
import type { TransItemType } from "../../src/i18n";
import type RemotelySavePlugin from "../../src/main";
import { stringToFragment } from "../../src/misc";
import { ChangeRemoteBaseDirModal } from "../../src/settings";
import {
DEFAULT_GOOGLEDRIVE_CONFIG,
sendRefreshTokenReq,
} from "./fsGoogleDrive";
class GoogleDriveAuthModal extends Modal {
readonly plugin: RemotelySavePlugin;
readonly authDiv: HTMLDivElement;
readonly revokeAuthDiv: HTMLDivElement;
readonly revokeAuthSetting: Setting;
readonly t: (x: TransItemType, vars?: any) => string;
constructor(
app: App,
plugin: RemotelySavePlugin,
authDiv: HTMLDivElement,
revokeAuthDiv: HTMLDivElement,
revokeAuthSetting: Setting,
t: (x: TransItemType, vars?: any) => string
) {
super(app);
this.plugin = plugin;
this.authDiv = authDiv;
this.revokeAuthDiv = revokeAuthDiv;
this.revokeAuthSetting = revokeAuthSetting;
this.t = t;
}
async onOpen() {
const { contentEl } = this;
const t = this.t;
const authUrl = "https://remotelysave.com/auth/googledrive/start";
const div2 = contentEl.createDiv();
div2.createDiv({
text: stringToFragment(t("modal_googledriveauth_tutorial")),
});
div2.createEl(
"button",
{
text: t("modal_googledriveauth_copybutton"),
},
(el) => {
el.onclick = async () => {
await navigator.clipboard.writeText(authUrl);
new Notice(t("modal_googledriveauth_copynotice"));
};
}
);
contentEl.createEl("p").createEl("a", {
href: authUrl,
text: authUrl,
});
let refreshToken = "";
new Setting(contentEl)
.setName(t("modal_googledrivce_maualinput"))
.setDesc(t("modal_googledrivce_maualinput_desc"))
.addText((text) =>
text
.setPlaceholder("")
.setValue("")
.onChange((val) => {
refreshToken = val.trim();
})
)
.addButton(async (button) => {
button.setButtonText(t("submit"));
button.onClick(async () => {
new Notice(t("modal_googledrive_maualinput_notice"));
try {
if (this.plugin.settings.googledrive === undefined) {
this.plugin.settings.googledrive = cloneDeep(
DEFAULT_GOOGLEDRIVE_CONFIG
);
}
this.plugin.settings.googledrive.refreshToken = refreshToken;
this.plugin.settings.googledrive.accessToken = "access";
this.plugin.settings.googledrive.accessTokenExpiresAtTimeMs = 1;
this.plugin.settings.googledrive.accessTokenExpiresInMs = 1;
// TODO: abstraction leaking now, how to fix?
const k = await sendRefreshTokenReq(refreshToken);
const ts = Date.now();
this.plugin.settings.googledrive.accessToken = k.access_token;
this.plugin.settings.googledrive.accessTokenExpiresInMs =
k.expires_in * 1000;
this.plugin.settings.googledrive.accessTokenExpiresAtTimeMs =
ts + k.expires_in * 1000 - 60 * 2 * 1000;
await this.plugin.saveSettings();
// try to remove data in clipboard
await navigator.clipboard.writeText("");
new Notice(t("modal_googledrive_maualinput_succ_notice"));
} catch (e) {
console.error(e);
new Notice(t("modal_googledrive_maualinput_fail_notice"));
} finally {
this.authDiv.toggleClass(
"googledrive-auth-button-hide",
this.plugin.settings.googledrive.refreshToken !== ""
);
this.revokeAuthDiv.toggleClass(
"googledrive-revoke-auth-button-hide",
this.plugin.settings.googledrive.refreshToken === ""
);
this.close();
}
});
});
}
onClose() {
const { contentEl } = this;
contentEl.empty();
}
}
class GoogleDriveRevokeAuthModal extends Modal {
readonly plugin: RemotelySavePlugin;
readonly authDiv: HTMLDivElement;
readonly revokeAuthDiv: HTMLDivElement;
readonly t: (x: TransItemType, vars?: any) => string;
constructor(
app: App,
plugin: RemotelySavePlugin,
authDiv: HTMLDivElement,
revokeAuthDiv: HTMLDivElement,
t: (x: TransItemType, vars?: any) => string
) {
super(app);
this.plugin = plugin;
this.authDiv = authDiv;
this.revokeAuthDiv = revokeAuthDiv;
this.t = t;
}
async onOpen() {
const t = this.t;
const { contentEl } = this;
contentEl.createEl("p", {
text: t("modal_googledriverevokeauth_step1"),
});
const consentUrl = "https://myaccount.google.com/permissions";
contentEl.createEl("p").createEl("a", {
href: consentUrl,
text: consentUrl,
});
contentEl.createEl("p", {
text: t("modal_googledriverevokeauth_step2"),
});
new Setting(contentEl)
.setName(t("modal_googledriverevokeauth_clean"))
.setDesc(t("modal_googledriverevokeauth_clean_desc"))
.addButton(async (button) => {
button.setButtonText(t("modal_googledriverevokeauth_clean_button"));
button.onClick(async () => {
try {
this.plugin.settings.googledrive = cloneDeep(
DEFAULT_GOOGLEDRIVE_CONFIG
);
await this.plugin.saveSettings();
this.authDiv.toggleClass(
"googledrive-auth-button-hide",
this.plugin.settings.googledrive.refreshToken !== ""
);
this.revokeAuthDiv.toggleClass(
"googledrive-revoke-auth-button-hide",
this.plugin.settings.googledrive.refreshToken === ""
);
new Notice(t("modal_googledriverevokeauth_clean_notice"));
this.close();
} catch (err) {
console.error(err);
new Notice(t("modal_googledriverevokeauth_clean_fail"));
}
});
});
}
onClose() {
const { contentEl } = this;
contentEl.empty();
}
}
export const generateGoogleDriveSettingsPart = (
containerEl: HTMLElement,
t: (x: TransItemType, vars?: any) => string,
app: App,
plugin: RemotelySavePlugin,
saveUpdatedConfigFunc: () => Promise<any> | undefined
) => {
const googleDriveDiv = containerEl.createEl("div", {
cls: "googledrive-hide",
});
googleDriveDiv.toggleClass(
"googledrive-hide",
plugin.settings.serviceType !== "googledrive"
);
googleDriveDiv.createEl("h2", { text: t("settings_googledrive") });
const googleDriveLongDescDiv = googleDriveDiv.createEl("div", {
cls: "settings-long-desc",
});
for (const c of [
t("settings_googledrive_disclaimer1"),
t("settings_googledrive_disclaimer2"),
]) {
googleDriveLongDescDiv.createEl("p", {
text: c,
cls: "googledrive-disclaimer",
});
}
googleDriveLongDescDiv.createEl("p", {
text: t("settings_googledrive_folder", {
remoteBaseDir:
plugin.settings.googledrive.remoteBaseDir || app.vault.getName(),
}),
});
googleDriveLongDescDiv.createDiv({
text: stringToFragment(t("settings_googledrive_pro_desc")),
cls: "googledrive-disclaimer",
});
const googleDriveNotShowUpHintSetting = new Setting(googleDriveDiv)
.setName(t("settings_googledrive_notshowuphint"))
.setDesc(t("settings_googledrive_notshowuphint_desc"))
.addButton(async (button) => {
button.setButtonText(t("settings_googledrive_notshowuphint_view_pro"));
button.onClick(async () => {
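// jump to the Pro section of the settings page (the heading with
// id "settings-pro" created in generateProSettingsPart)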
window.location.href = "#settings-pro";
});
});
const googleDriveAllowedToUsedDiv = googleDriveDiv.createDiv();
// if pro is enabled, show the Google Drive settings; otherwise hide them.
const allowGoogleDrive =
plugin.settings.pro?.enabledProFeatures.filter(
(x) => x.featureName === "feature-google_drive"
).length === 1;
console.debug(`allow to show up google drive settings? ${allowGoogleDrive}`);
if (allowGoogleDrive) {
googleDriveAllowedToUsedDiv.removeClass("googledrive-allow-to-use-hide");
googleDriveNotShowUpHintSetting.settingEl.addClass(
"googledrive-allow-to-use-hide"
);
} else {
googleDriveAllowedToUsedDiv.addClass("googledrive-allow-to-use-hide");
googleDriveNotShowUpHintSetting.settingEl.removeClass(
"googledrive-allow-to-use-hide"
);
}
const googleDriveSelectAuthDiv = googleDriveAllowedToUsedDiv.createDiv();
const googleDriveAuthDiv = googleDriveSelectAuthDiv.createDiv({
cls: "googledrive-auth-button-hide settings-auth-related",
});
const googleDriveRevokeAuthDiv = googleDriveSelectAuthDiv.createDiv({
cls: "googledrive-revoke-auth-button-hide settings-auth-related",
});
const googleDriveRevokeAuthSetting = new Setting(googleDriveRevokeAuthDiv)
.setName(t("settings_googledrive_revoke"))
.setDesc(t("settings_googledrive_revoke_desc"))
.addButton(async (button) => {
button.setButtonText(t("settings_googledrive_revoke_button"));
button.onClick(async () => {
new GoogleDriveRevokeAuthModal(
app,
plugin,
googleDriveAuthDiv,
googleDriveRevokeAuthDiv,
t
).open();
});
});
new Setting(googleDriveAuthDiv)
.setName(t("settings_googledrive_auth"))
.setDesc(t("settings_googledrive_auth_desc"))
.addButton(async (button) => {
button.setButtonText(t("settings_googledrive_auth_button"));
button.onClick(async () => {
const modal = new GoogleDriveAuthModal(
app,
plugin,
googleDriveAuthDiv,
googleDriveRevokeAuthDiv,
googleDriveRevokeAuthSetting,
t
);
plugin.oauth2Info.helperModal = modal;
plugin.oauth2Info.authDiv = googleDriveAuthDiv;
plugin.oauth2Info.revokeDiv = googleDriveRevokeAuthDiv;
plugin.oauth2Info.revokeAuthSetting = googleDriveRevokeAuthSetting;
modal.open();
});
});
googleDriveAuthDiv.toggleClass(
"googledrive-auth-button-hide",
plugin.settings.googledrive.refreshToken !== ""
);
googleDriveRevokeAuthDiv.toggleClass(
"googledrive-revoke-auth-button-hide",
plugin.settings.googledrive.refreshToken === ""
);
let newgoogleDriveRemoteBaseDir =
plugin.settings.googledrive.remoteBaseDir || "";
new Setting(googleDriveAllowedToUsedDiv)
.setName(t("settings_remotebasedir"))
.setDesc(t("settings_remotebasedir_desc"))
.addText((text) =>
text
.setPlaceholder(app.vault.getName())
.setValue(newgoogleDriveRemoteBaseDir)
.onChange((value) => {
newgoogleDriveRemoteBaseDir = value.trim();
})
)
.addButton((button) => {
button.setButtonText(t("confirm"));
button.onClick(() => {
new ChangeRemoteBaseDirModal(
app,
plugin,
newgoogleDriveRemoteBaseDir,
"googledrive"
).open();
});
});
new Setting(googleDriveAllowedToUsedDiv)
.setName(t("settings_checkonnectivity"))
.setDesc(t("settings_checkonnectivity_desc"))
.addButton(async (button) => {
button.setButtonText(t("settings_checkonnectivity_button"));
button.onClick(async () => {
new Notice(t("settings_checkonnectivity_checking"));
const client = getClient(plugin.settings, app.vault.getName(), () =>
plugin.saveSettings()
);
const errors = { msg: "" };
const res = await client.checkConnect((err: any) => {
errors.msg = `${err}`;
});
if (res) {
new Notice(t("settings_googledrive_connect_succ"));
} else {
new Notice(t("settings_googledrive_connect_fail"));
new Notice(errors.msg);
}
});
});
return {
googleDriveDiv: googleDriveDiv,
googleDriveAllowedToUsedDiv: googleDriveAllowedToUsedDiv,
googleDriveNotShowUpHintSetting: googleDriveNotShowUpHintSetting,
};
};

pro/src/settingsPro.ts (new file, 386 lines)

@@ -0,0 +1,386 @@
import cloneDeep from "lodash/cloneDeep";
import { type App, Modal, Notice, Setting } from "obsidian";
import { features } from "process";
import type { TransItemType } from "../../src/i18n";
import type RemotelySavePlugin from "../../src/main";
import { stringToFragment } from "../../src/misc";
import {
DEFAULT_PRO_CONFIG,
generateAuthUrlAndCodeVerifierChallenge,
getAndSaveProEmail,
getAndSaveProFeatures,
sendAuthReq,
setConfigBySuccessfullAuthInplace,
} from "./account";
import {
type FeatureInfo,
PRO_CLIENT_ID,
type ProConfig,
} from "./baseTypesPro";
export class ProAuthModal extends Modal {
readonly plugin: RemotelySavePlugin;
readonly authDiv: HTMLDivElement;
readonly revokeAuthDiv: HTMLDivElement;
readonly revokeAuthSetting: Setting;
readonly proFeaturesListSetting: Setting;
readonly t: (x: TransItemType, vars?: any) => string;
constructor(
app: App,
plugin: RemotelySavePlugin,
authDiv: HTMLDivElement,
revokeAuthDiv: HTMLDivElement,
revokeAuthSetting: Setting,
proFeaturesListSetting: Setting,
t: (x: TransItemType, vars?: any) => string
) {
super(app);
this.plugin = plugin;
this.authDiv = authDiv;
this.revokeAuthDiv = revokeAuthDiv;
this.revokeAuthSetting = revokeAuthSetting;
this.proFeaturesListSetting = proFeaturesListSetting;
this.t = t;
}
async onOpen() {
const { contentEl } = this;
const { authUrl, codeVerifier, codeChallenge } =
await generateAuthUrlAndCodeVerifierChallenge(false);
this.plugin.oauth2Info.verifier = codeVerifier;
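// PKCE: the code verifier stays local; authUrl embeds the derived code
// challenge, and the verifier is sent later together with the auth code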
const t = this.t;
const div2 = contentEl.createDiv();
div2.createEl(
"button",
{
text: t("modal_proauth_copybutton"),
},
(el) => {
el.onclick = async () => {
await navigator.clipboard.writeText(authUrl);
new Notice(t("modal_proauth_copynotice"));
};
}
);
contentEl.createEl("p").createEl("a", {
href: authUrl,
text: authUrl,
});
// manual paste
let authCode = "";
new Setting(contentEl)
.setName(t("modal_proauth_maualinput"))
.setDesc(t("modal_proauth_maualinput_desc"))
.addText((text) =>
text
.setPlaceholder("")
.setValue("")
.onChange((val) => {
authCode = val.trim();
})
)
.addButton(async (button) => {
button.setButtonText(t("submit"));
button.onClick(async () => {
new Notice(t("modal_proauth_maualinput_notice"));
try {
const authRes = await sendAuthReq(
codeVerifier ?? "verifier",
authCode,
async (e: any) => {
new Notice(t("protocol_pro_connect_fail"));
new Notice(`${e}`);
throw e;
}
);
console.debug(authRes);
const self = this;
setConfigBySuccessfullAuthInplace(
this.plugin.settings.pro!,
authRes!,
() => self.plugin.saveSettings()
);
await getAndSaveProFeatures(
this.plugin.settings.pro!,
this.plugin.manifest.version,
() => self.plugin.saveSettings()
);
this.proFeaturesListSetting.setDesc(
stringToFragment(
t("settings_pro_features_desc", {
features: featureListToText(
this.plugin.settings.pro!.enabledProFeatures
),
})
)
);
await getAndSaveProEmail(
this.plugin.settings.pro!,
this.plugin.manifest.version,
() => self.plugin.saveSettings()
);
new Notice(
t("protocol_pro_connect_manualinput_succ", {
email: this.plugin.settings.pro!.email ?? "(no email)",
})
);
this.plugin.oauth2Info.verifier = ""; // reset it
this.plugin.oauth2Info.authDiv?.toggleClass(
"pro-auth-button-hide",
this.plugin.settings.pro?.refreshToken !== ""
);
this.plugin.oauth2Info.authDiv = undefined;
this.plugin.oauth2Info.revokeAuthSetting?.setDesc(
t("protocol_pro_connect_succ_revoke", {
email: this.plugin.settings.pro?.email,
})
);
this.plugin.oauth2Info.revokeAuthSetting = undefined;
this.plugin.oauth2Info.revokeDiv?.toggleClass(
"pro-revoke-auth-button-hide",
this.plugin.settings.pro?.email === ""
);
this.plugin.oauth2Info.revokeDiv = undefined;
// try to remove data in clipboard
await navigator.clipboard.writeText("");
this.close();
} catch (err) {
console.error(err);
new Notice(t("modal_proauth_maualinput_conn_fail"));
}
});
});
}
onClose() {
const { contentEl } = this;
contentEl.empty();
}
}
export class ProRevokeAuthModal extends Modal {
readonly plugin: RemotelySavePlugin;
readonly authDiv: HTMLDivElement;
readonly revokeAuthDiv: HTMLDivElement;
readonly t: (x: TransItemType, vars?: any) => string;
constructor(
app: App,
plugin: RemotelySavePlugin,
authDiv: HTMLDivElement,
revokeAuthDiv: HTMLDivElement,
t: (x: TransItemType, vars?: any) => string
) {
super(app);
this.plugin = plugin;
this.authDiv = authDiv;
this.revokeAuthDiv = revokeAuthDiv;
this.t = t;
}
async onOpen() {
const { contentEl } = this;
const t = this.t;
contentEl.createEl("p", {
text: t("modal_prorevokeauth"),
});
new Setting(contentEl)
.setName(t("modal_prorevokeauth_clean"))
.setDesc(t("modal_prorevokeauth_clean_desc"))
.addButton(async (button) => {
button.setButtonText(t("modal_prorevokeauth_clean_button"));
button.onClick(async () => {
try {
this.plugin.settings.pro = cloneDeep(DEFAULT_PRO_CONFIG);
await this.plugin.saveSettings();
this.authDiv.toggleClass(
"pro-auth-button-hide",
this.plugin.settings.pro?.refreshToken !== ""
);
this.revokeAuthDiv.toggleClass(
"pro-revoke-auth-button-hide",
this.plugin.settings.pro?.refreshToken === ""
);
new Notice(t("modal_prorevokeauth_clean_notice"));
this.close();
} catch (err) {
console.error(err);
new Notice(t("modal_prorevokeauth_clean_fail"));
}
});
});
}
onClose() {
const { contentEl } = this;
contentEl.empty();
}
}
const featureListToText = (features: FeatureInfo[]) => {
// TODO: i18n
if (features === undefined || features.length === 0) {
return "No features enabled.";
}
return features
.map((x) => {
return `${x.featureName} (expire: ${new Date(
Number(x.expireAtTimeMs)
).toISOString()})`;
})
.join("<br/>");
};
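// illustrative only (not part of the original file): a single enabled feature
//   featureListToText([{ featureName: "feature-google_drive", expireAtTimeMs: 1735689600000 }])
// would render as "feature-google_drive (expire: 2025-01-01T00:00:00.000Z)"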
export const generateProSettingsPart = (
proDiv: HTMLDivElement,
t: (x: TransItemType, vars?: any) => string,
app: App,
plugin: RemotelySavePlugin,
saveUpdatedConfigFunc: () => Promise<any> | undefined,
googleDriveAllowedToUsedDiv: HTMLDivElement,
googleDriveNotShowUpHintSetting: Setting
) => {
proDiv
.createEl("h2", { text: t("settings_pro") })
.setAttribute("id", "settings-pro");
proDiv.createEl("div", {
text: stringToFragment(t("settings_pro_tutorial")),
});
const proSelectAuthDiv = proDiv.createDiv();
const proAuthDiv = proSelectAuthDiv.createDiv({
cls: "pro-auth-button-hide settings-auth-related",
});
const proRevokeAuthDiv = proSelectAuthDiv.createDiv({
cls: "pro-revoke-auth-button-hide settings-auth-related",
});
const proFeaturesListSetting = new Setting(proRevokeAuthDiv)
.setName(t("settings_pro_features"))
.setDesc(
stringToFragment(
t("settings_pro_features_desc", {
features: featureListToText(plugin.settings.pro!.enabledProFeatures),
})
)
);
proFeaturesListSetting.addButton(async (button) => {
button.setButtonText(t("settings_pro_features_refresh_button"));
button.onClick(async () => {
new Notice(t("settings_pro_features_refresh_fetch"));
await getAndSaveProFeatures(
plugin.settings.pro!,
plugin.manifest.version,
saveUpdatedConfigFunc
);
proFeaturesListSetting.setDesc(
stringToFragment(
t("settings_pro_features_desc", {
features: featureListToText(
plugin.settings.pro!.enabledProFeatures
),
})
)
);
const allowGoogleDrive =
plugin.settings.pro?.enabledProFeatures.filter(
(x) => x.featureName === "feature-google_drive"
).length === 1;
console.debug(
`allow to show up google drive settings? ${allowGoogleDrive}`
);
if (allowGoogleDrive) {
googleDriveAllowedToUsedDiv.removeClass(
"googledrive-allow-to-use-hide"
);
googleDriveNotShowUpHintSetting.settingEl.addClass(
"googledrive-allow-to-use-hide"
);
} else {
googleDriveAllowedToUsedDiv.addClass("googledrive-allow-to-use-hide");
googleDriveNotShowUpHintSetting.settingEl.removeClass(
"googledrive-allow-to-use-hide"
);
}
new Notice(t("settings_pro_features_refresh_succ"));
});
});
const proRevokeAuthSetting = new Setting(proRevokeAuthDiv)
.setName(t("settings_pro_revoke"))
.setDesc(
t("settings_pro_revoke_desc", {
email: plugin.settings.pro?.email,
})
)
.addButton(async (button) => {
button.setButtonText(t("settings_pro_revoke_button"));
button.onClick(async () => {
new ProRevokeAuthModal(
app,
plugin,
proAuthDiv,
proRevokeAuthDiv,
t
).open();
});
});
new Setting(proAuthDiv)
.setName(t("settings_pro_intro"))
.setDesc(stringToFragment(t("settings_pro_intro_desc")))
.addButton(async (button) => {
button.setButtonText(t("settings_pro_intro_button"));
button.onClick(async () => {
window.open("https://remotelysave.com/user/signupin", "_self");
});
});
new Setting(proAuthDiv)
.setName(t("settings_pro_auth"))
.setDesc(t("settings_pro_auth_desc"))
.addButton(async (button) => {
button.setButtonText(t("settings_pro_auth_button"));
button.onClick(async () => {
const modal = new ProAuthModal(
app,
plugin,
proAuthDiv,
proRevokeAuthDiv,
proRevokeAuthSetting,
proFeaturesListSetting,
t
);
plugin.oauth2Info.helperModal = modal;
plugin.oauth2Info.authDiv = proAuthDiv;
plugin.oauth2Info.revokeDiv = proRevokeAuthDiv;
plugin.oauth2Info.revokeAuthSetting = proRevokeAuthSetting;
modal.open();
});
});
proAuthDiv.toggleClass(
"pro-auth-button-hide",
plugin.settings.pro?.refreshToken !== ""
);
proRevokeAuthDiv.toggleClass(
"pro-revoke-auth-button-hide",
plugin.settings.pro?.refreshToken === ""
);
};


@@ -0,0 +1,68 @@
import { deepStrictEqual, rejects, throws } from "assert";
import { getFileRename } from "../src/conflictLogic";
describe("New name is generated", () => {
it("should throw for empty file", async () => {
for (const key of ["", "/", ".", ".."]) {
throws(() => getFileRename(key));
}
});
it("should throw for folder", async () => {
for (const key of ["sss/", "ssss/yyy/"]) {
throws(() => getFileRename(key));
}
});
it("should correctly get no ext files renamed", async () => {
deepStrictEqual(getFileRename("abc"), "abc.dup");
deepStrictEqual(getFileRename("xxxx/yyyy/abc"), "xxxx/yyyy/abc.dup");
});
it("should correctly get dot files renamed", async () => {
deepStrictEqual(getFileRename(".abc"), ".abc.dup");
deepStrictEqual(getFileRename("xxxx/yyyy/.efg"), "xxxx/yyyy/.efg.dup");
deepStrictEqual(getFileRename("xxxx/yyyy/hij."), "xxxx/yyyy/hij.dup");
});
it("should correctly get normal files renamed", async () => {
deepStrictEqual(getFileRename("abc.efg"), "abc.dup.efg");
deepStrictEqual(
getFileRename("xxxx/yyyy/abc.efg"),
"xxxx/yyyy/abc.dup.efg"
);
deepStrictEqual(
getFileRename("xxxx/yyyy/abc.tar.gz"),
"xxxx/yyyy/abc.tar.dup.gz"
);
deepStrictEqual(
getFileRename("xxxx/yyyy/.abc.efg"),
"xxxx/yyyy/.abc.dup.efg"
);
});
it("should correctly get duplicated files renamed again", async () => {
deepStrictEqual(getFileRename("abc.dup"), "abc.dup.dup");
deepStrictEqual(
getFileRename("xxxx/yyyy/.abc.dup"),
"xxxx/yyyy/.abc.dup.dup"
);
deepStrictEqual(
getFileRename("xxxx/yyyy/abc.dup.md"),
"xxxx/yyyy/abc.dup.dup.md"
);
deepStrictEqual(
getFileRename("xxxx/yyyy/.abc.dup.md"),
"xxxx/yyyy/.abc.dup.dup.md"
);
});
});
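// For reference, a hypothetical sketch consistent with the expectations above
// (the real getFileRename lives in src/conflictLogic and may differ in detail):
const getFileRenameSketch = (key: string): string => {
if (key === "" || key === "." || key === ".." || key.endsWith("/")) {
// empty keys and folders cannot be renamed
throw Error(`cannot rename ${key}`);
}
const idx = key.lastIndexOf("/");
const dir = key.slice(0, idx + 1); // "" when there is no folder part
const base = key.slice(idx + 1);
const dot = base.lastIndexOf(".");
if (dot <= 0 || dot === base.length - 1) {
// no extension, a dot file, or a trailing dot: just append ".dup"
return `${dir}${base.endsWith(".") ? base.slice(0, -1) : base}.dup`;
}
// otherwise insert ".dup" before the last extension
return `${dir}${base.slice(0, dot)}.dup${base.slice(dot)}`;
};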

src/LICENSE (new file, 201 lines)

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

src/README.md (new file, 9 lines)

@@ -0,0 +1,9 @@
# Main Basic Source
## What?
The main basic source code for Remotely Save.
## License
The code, files, and subfolders inside the current folder (`src` in the repo) are released under the open-source Apache License, version 2.0.


@@ -3,17 +3,25 @@
* To avoid circular dependency.
*/
import { Platform, requireApiVersion } from "obsidian";
import type { LangType, LangTypeAndAuto } from "./i18n";
import type { GoogleDriveConfig, ProConfig } from "../pro/src/baseTypesPro";
import type { LangTypeAndAuto } from "./i18n";
export const DEFAULT_CONTENT_TYPE = "application/octet-stream";
export type SUPPORTED_SERVICES_TYPE = "s3" | "webdav" | "dropbox" | "onedrive";
export type SUPPORTED_SERVICES_TYPE =
| "s3"
| "webdav"
| "dropbox"
| "onedrive"
| "webdis"
| "googledrive";
export type SUPPORTED_SERVICES_TYPE_WITH_REMOTE_BASE_DIR =
| "webdav"
| "dropbox"
| "onedrive";
| "onedrive"
| "webdis"
| "googledrive";
export interface S3Config {
s3Endpoint: string;
@@ -27,6 +35,9 @@ export interface S3Config {
remotePrefix?: string;
useAccurateMTime?: boolean;
reverseProxyNoSignUrl?: string;
generateFolderObject?: boolean;
/**
* @deprecated
@@ -81,6 +92,14 @@ export interface OnedriveConfig {
username: string;
credentialsShouldBeDeletedAtTime?: number;
remoteBaseDir?: string;
emptyFile: "skip" | "error";
}
export interface WebdisConfig {
address: string;
username?: string;
password?: string;
remoteBaseDir?: string;
}
export type SyncDirectionType =
@@ -90,13 +109,27 @@ export type SyncDirectionType =
export type CipherMethodType = "rclone-base64" | "openssl-base64" | "unknown";
export type QRExportType = "all_but_oauth2" | "dropbox" | "onedrive";
export type QRExportType =
| "basic_and_advanced"
| "s3"
| "dropbox"
| "onedrive"
| "webdav"
| "webdis"
| "googledrive";
export interface ProfilerConfig {
enablePrinting?: boolean;
recordSize?: boolean;
}
export interface RemotelySavePluginSettings {
s3: S3Config;
webdav: WebdavConfig;
dropbox: DropboxConfig;
onedrive: OnedriveConfig;
webdis: WebdisConfig;
googledrive: GoogleDriveConfig;
password: string;
serviceType: SUPPORTED_SERVICES_TYPE;
currLogLevel?: string;
@@ -125,6 +158,10 @@ export interface RemotelySavePluginSettings {
encryptionMethod?: CipherMethodType;
profiler?: ProfilerConfig;
pro?: ProConfig;
/**
* @deprecated
*/
@@ -158,7 +195,10 @@ export const OAUTH2_FORCE_EXPIRE_MILLISECONDS = 1000 * 60 * 60 * 24 * 80;
export type EmptyFolderCleanType = "skip" | "clean_both";
export type ConflictActionType = "keep_newer" | "keep_larger" | "rename_both";
export type ConflictActionType =
| "keep_newer"
| "keep_larger"
| "smart_conflict";
export type DecisionTypeForMixedEntity =
| "only_history"
@@ -173,11 +213,11 @@ export type DecisionTypeForMixedEntity =
| "remote_is_deleted_thus_also_delete_local"
| "conflict_created_then_keep_local"
| "conflict_created_then_keep_remote"
| "conflict_created_then_keep_both"
| "conflict_created_then_smart_conflict"
| "conflict_created_then_do_nothing"
| "conflict_modified_then_keep_local"
| "conflict_modified_then_keep_remote"
| "conflict_modified_then_keep_both"
| "conflict_modified_then_smart_conflict"
| "folder_existed_both_then_do_nothing"
| "folder_existed_local_then_also_create_remote"
| "folder_existed_remote_then_also_create_local"
@@ -207,6 +247,7 @@ export interface Entity {
hash?: string;
etag?: string;
synthesizedFolder?: boolean;
synthesizedFile?: boolean;
}
export interface UploadedType {
@@ -227,6 +268,8 @@ export interface MixedEntity {
decision?: DecisionTypeForMixedEntity;
conflictAction?: ConflictActionType;
change?: boolean;
sideNotes?: any;
}
@@ -258,15 +301,6 @@ export interface FileOrFolderMixedState {
deltimeRemoteFmt?: string;
}
export const API_VER_STAT_FOLDER = "0.13.27";
export const API_VER_REQURL = "0.13.26"; // desktop ver 0.13.26, iOS ver 1.1.1
export const API_VER_REQURL_ANDROID = "0.14.6"; // Android ver 1.2.1
export const API_VER_ENSURE_REQURL_OK = "1.0.0"; // always bypass CORS here
export const VALID_REQURL =
(!Platform.isAndroidApp && requireApiVersion(API_VER_REQURL)) ||
(Platform.isAndroidApp && requireApiVersion(API_VER_REQURL_ANDROID));
export const DEFAULT_DEBUG_FOLDER = "_debug_remotely_save/";
export const DEFAULT_SYNC_PLANS_HISTORY_FILE_PREFIX =
"sync_plans_hist_exported_on_";

src/baseTypesObs.ts (new file, 14 lines)

@@ -0,0 +1,14 @@
/**
* Every utils requiring Obsidian is placed here.
*/
import { Platform, requireApiVersion } from "obsidian";
export const API_VER_STAT_FOLDER = "0.13.27";
export const API_VER_REQURL = "0.13.26"; // desktop ver 0.13.26, iOS ver 1.1.1
export const API_VER_REQURL_ANDROID = "0.14.6"; // Android ver 1.2.1
export const API_VER_ENSURE_REQURL_OK = "1.0.0"; // always bypass CORS here
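// VALID_REQURL below means Obsidian's requestUrl() is available on this
// platform, letting the plugin issue HTTP requests without browser CORS limits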
export const VALID_REQURL =
(!Platform.isAndroidApp && requireApiVersion(API_VER_REQURL)) ||
(Platform.isAndroidApp && requireApiVersion(API_VER_REQURL_ANDROID));


@@ -1,4 +1,4 @@
import { base64, base64url } from "rfc4648";
import { base64url } from "rfc4648";
import { reverseString } from "./misc";
import type { RemotelySavePluginSettings } from "./baseTypes";

src/copyLogic.ts (new file, 60 lines)

@@ -0,0 +1,60 @@
import type { FakeFs } from "./fsAll";
export async function copyFolder(key: string, left: FakeFs, right: FakeFs) {
if (!key.endsWith("/")) {
throw Error(`should not call ${key} in copyFolder`);
}
const statsLeft = await left.stat(key);
const entity = await right.mkdir(key, statsLeft.mtimeCli);
return {
entity: entity,
content: undefined,
};
}
export async function copyFile(key: string, left: FakeFs, right: FakeFs) {
// console.debug(`copyFile: key=${key}, left=${left.kind}, right=${right.kind}`);
if (key.endsWith("/")) {
throw Error(`should not call ${key} in copyFile`);
}
const statsLeft = await left.stat(key);
const content = await left.readFile(key);
if (statsLeft.size === undefined || statsLeft.size === 0) {
// some Android builds have a bug where stat() does not return the size; fall back to the actual content length
statsLeft.size = content.byteLength;
} else {
if (statsLeft.size !== content.byteLength) {
throw Error(
`error copying ${left.kind}=>${right.kind}: size not matched`
);
}
}
if (statsLeft.mtimeCli === undefined) {
throw Error(`error copying ${left.kind}=>${right.kind}, no mtimeCli`);
}
// console.debug(`copyFile: about to start right.writeFile`);
return {
entity: await right.writeFile(
key,
content,
statsLeft.mtimeCli,
statsLeft.mtimeCli /* TODO */
),
content: content,
};
}
export async function copyFileOrFolder(
key: string,
left: FakeFs,
right: FakeFs
) {
if (key.endsWith("/")) {
return await copyFolder(key, left, right);
} else {
return await copyFile(key, left, right);
}
}
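// illustrative usage (hypothetical names): during sync one entry is copied
// between any two backends in a single call, e.g.
//   const { entity, content } = await copyFileOrFolder("notes/a.md", fsLocal, fsRemote);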


@@ -1,22 +1,40 @@
import { TAbstractFile, TFolder, TFile, Vault } from "obsidian";
import type { Vault } from "obsidian";
import {
readAllProfilerResultsByVault,
readAllSyncPlanRecordTextsByVault,
} from "./localdb";
import type { InternalDBs } from "./localdb";
import { mkdirpInVault, unixTimeToStr } from "./misc";
import {
DEFAULT_DEBUG_FOLDER,
DEFAULT_PROFILER_RESULT_FILE_PREFIX,
DEFAULT_SYNC_PLANS_HISTORY_FILE_PREFIX,
} from "./baseTypes";
import {
readAllProfilerResultsByVault,
readAllSyncPlanRecordTextsByVault,
} from "./localdb";
import type { InternalDBs } from "./localdb";
import { mkdirpInVault } from "./misc";
import type { SyncPlanType } from "./sync";
const getSubsetOfSyncPlan = (x: string, onlyChange: boolean) => {
if (!onlyChange) {
return x;
}
const y: SyncPlanType = JSON.parse(x);
const z: SyncPlanType = Object.fromEntries(
Object.entries(y).filter(([key, val]) => {
if (key === "/$@meta") {
return true;
}
return val.change === undefined || val.change === true;
})
);
return JSON.stringify(z, null, 2);
};
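// e.g. with onlyChange === true, records whose `change` field is false are
// dropped, so the exported sync plan only keeps entries that actually did
// something (plus the "/$@meta" bookkeeping entry)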
export const exportVaultSyncPlansToFiles = async (
db: InternalDBs,
vault: Vault,
vaultRandomID: string,
howMany: number
howMany: number,
onlyChange: boolean
) => {
console.info("exporting sync plans");
await mkdirpInVault(DEFAULT_DEBUG_FOLDER, vault);
@@ -28,12 +46,18 @@ export const exportVaultSyncPlansToFiles = async (
if (howMany <= 0) {
md =
"Sync plans found:\n\n" +
records.map((x) => "```json\n" + x + "\n```\n").join("\n");
records
.map(
(x) => "```json\n" + getSubsetOfSyncPlan(x, onlyChange) + "\n```\n"
)
.join("\n");
} else {
md =
"Sync plans found:\n\n" +
records
.map((x) => "```json\n" + x + "\n```\n")
.map(
(x) => "```json\n" + getSubsetOfSyncPlan(x, onlyChange) + "\n```\n"
)
.slice(0, howMany)
.join("\n");
}


@@ -39,7 +39,7 @@ export const encryptArrayBuffer = async (
arrBuf: ArrayBuffer,
password: string,
rounds: number = DEFAULT_ITER,
saltHex: string = ""
saltHex = ""
) => {
let salt: Uint8Array;
if (saltHex !== "") {
@@ -109,7 +109,7 @@ export const encryptStringToBase32 = async (
text: string,
password: string,
rounds: number = DEFAULT_ITER,
saltHex: string = ""
saltHex = ""
) => {
const enc = await encryptArrayBuffer(
bufferToArrayBuffer(new TextEncoder().encode(text)),
@@ -138,7 +138,7 @@ export const encryptStringToBase64url = async (
text: string,
password: string,
rounds: number = DEFAULT_ITER,
saltHex: string = ""
saltHex = ""
) => {
const enc = await encryptArrayBuffer(
bufferToArrayBuffer(new TextEncoder().encode(text)),


@@ -1,5 +1,5 @@
import { nanoid } from "nanoid";
import { Cipher as CipherRCloneCryptPack } from "@fyears/rclone-crypt";
import { nanoid } from "nanoid";
const ctx: WorkerGlobalScope = self as any;


@ -1,215 +0,0 @@
import { CipherMethodType } from "./baseTypes";
import * as openssl from "./encryptOpenSSL";
import * as rclone from "./encryptRClone";
import { isVaildText } from "./misc";
export class Cipher {
readonly password: string;
readonly method: CipherMethodType;
cipherRClone?: rclone.CipherRclone;
constructor(password: string, method: CipherMethodType) {
this.password = password ?? "";
this.method = method;
if (method === "rclone-base64") {
this.cipherRClone = new rclone.CipherRclone(password, 5);
}
}
closeResources() {
if (this.method === "rclone-base64" && this.cipherRClone !== undefined) {
this.cipherRClone.closeResources();
}
}
isPasswordEmpty() {
return this.password === "";
}
isFolderAware() {
if (this.method === "openssl-base64") {
return false;
}
if (this.method === "rclone-base64") {
return true;
}
throw Error(`no idea about isFolderAware for method=${this.method}`);
}
async encryptContent(content: ArrayBuffer) {
// console.debug("start encryptContent");
if (this.password === "") {
return content;
}
if (this.method === "openssl-base64") {
const res = await openssl.encryptArrayBuffer(content, this.password);
if (res === undefined) {
throw Error(`cannot encrypt content`);
}
return res;
} else if (this.method === "rclone-base64") {
const res =
await this.cipherRClone!.encryptContentByCallingWorker(content);
if (res === undefined) {
throw Error(`cannot encrypt content`);
}
return res;
} else {
throw Error(`not supported encrypt method=${this.method}`);
}
}
async decryptContent(content: ArrayBuffer) {
// console.debug("start decryptContent");
if (this.password === "") {
return content;
}
if (this.method === "openssl-base64") {
const res = await openssl.decryptArrayBuffer(content, this.password);
if (res === undefined) {
throw Error(`cannot decrypt content`);
}
return res;
} else if (this.method === "rclone-base64") {
const res =
await this.cipherRClone!.decryptContentByCallingWorker(content);
if (res === undefined) {
throw Error(`cannot decrypt content`);
}
return res;
} else {
throw Error(`not supported decrypt method=${this.method}`);
}
}
async encryptName(name: string) {
// console.debug("start encryptName");
if (this.password === "") {
return name;
}
if (this.method === "openssl-base64") {
const res = await openssl.encryptStringToBase64url(name, this.password);
if (res === undefined) {
throw Error(`cannot encrypt name=${name}`);
}
return res;
} else if (this.method === "rclone-base64") {
const res = await this.cipherRClone!.encryptNameByCallingWorker(name);
if (res === undefined) {
throw Error(`cannot encrypt name=${name}`);
}
return res;
} else {
throw Error(`not supported encrypt method=${this.method}`);
}
}
async decryptName(name: string): Promise<string> {
// console.debug("start decryptName");
if (this.password === "") {
return name;
}
if (this.method === "openssl-base64") {
if (name.startsWith(openssl.MAGIC_ENCRYPTED_PREFIX_BASE32)) {
// backward compatible with the openssl-base32 format
try {
const res = await openssl.decryptBase32ToString(name, this.password);
if (res !== undefined && isVaildText(res)) {
return res;
} else {
throw Error(`cannot decrypt name=${name}`);
}
} catch (error) {
throw Error(`cannot decrypt name=${name}`);
}
} else if (name.startsWith(openssl.MAGIC_ENCRYPTED_PREFIX_BASE64URL)) {
try {
const res = await openssl.decryptBase64urlToString(
name,
this.password
);
if (res !== undefined && isVaildText(res)) {
return res;
} else {
throw Error(`cannot decrypt name=${name}`);
}
} catch (error) {
throw Error(`cannot decrypt name=${name}`);
}
} else {
throw Error(
`method=${this.method} but the name=${name}, likely mismatch`
);
}
} else if (this.method === "rclone-base64") {
const res = await this.cipherRClone!.decryptNameByCallingWorker(name);
if (res === undefined) {
throw Error(`cannot decrypt name=${name}`);
}
return res;
} else {
throw Error(`not supported decrypt method=${this.method}`);
}
}
getSizeFromOrigToEnc(x: number) {
if (this.password === "") {
return x;
}
if (this.method === "openssl-base64") {
return openssl.getSizeFromOrigToEnc(x);
} else if (this.method === "rclone-base64") {
return rclone.getSizeFromOrigToEnc(x);
} else {
throw Error(`not supported encrypt method=${this.method}`);
}
}
/**
* quick guess, no actual decryption here
* @param name
* @returns
*/
static isLikelyOpenSSLEncryptedName(name: string): boolean {
if (
name.startsWith(openssl.MAGIC_ENCRYPTED_PREFIX_BASE32) ||
name.startsWith(openssl.MAGIC_ENCRYPTED_PREFIX_BASE64URL)
) {
return true;
}
return false;
}
/**
* quick guess, no actual decryption here
* @param name
* @returns
*/
static isLikelyEncryptedName(name: string): boolean {
return Cipher.isLikelyOpenSSLEncryptedName(name);
}
/**
* quick guess, no actual decryption here, only openssl can be guessed here
* @param name
* @returns
*/
static isLikelyEncryptedNameNotMatchMethod(
name: string,
method: CipherMethodType
): boolean {
if (
Cipher.isLikelyOpenSSLEncryptedName(name) &&
method !== "openssl-base64"
) {
return true;
}
if (
!Cipher.isLikelyOpenSSLEncryptedName(name) &&
method === "openssl-base64"
) {
return true;
}
return false;
}
}

src/fsAll.ts (new file, 22 lines)

@@ -0,0 +1,22 @@
import type { Entity } from "./baseTypes";
export abstract class FakeFs {
abstract kind: string;
abstract walk(): Promise<Entity[]>;
abstract walkPartial(): Promise<Entity[]>;
abstract stat(key: string): Promise<Entity>;
abstract mkdir(key: string, mtime?: number, ctime?: number): Promise<Entity>;
abstract writeFile(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity>;
abstract readFile(key: string): Promise<ArrayBuffer>;
abstract rename(key1: string, key2: string): Promise<void>;
abstract rm(key: string): Promise<void>;
abstract checkConnect(callbackFunc?: any): Promise<boolean>;
abstract getUserDisplayName(): Promise<string>;
abstract revokeAuth(): Promise<any>;
abstract allowEmptyFile(): boolean;
}
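// Each remote (S3, WebDAV, Dropbox, OneDrive, Google Drive, ...) and the local
// vault sit behind this one interface, so the sync algorithm can copy entries
// between any two of them. Judging from the implementations in this diff,
// walk() lists everything recursively while walkPartial() fetches only a small
// shallow listing for cheap checks such as password validation.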


@@ -1,25 +1,21 @@
import { Dropbox, DropboxAuth } from "dropbox";
import type { files, DropboxResponseError, DropboxResponse } from "dropbox";
import { Vault } from "obsidian";
import * as path from "path";
import type { DropboxResponse, DropboxResponseError, files } from "dropbox";
import random from "lodash/random";
import {
DropboxConfig,
Entity,
COMMAND_CALLBACK_DROPBOX,
type DropboxConfig,
type Entity,
OAUTH2_FORCE_EXPIRE_MILLISECONDS,
UploadedType,
} from "./baseTypes";
import { FakeFs } from "./fsAll";
import {
bufferToArrayBuffer,
delay,
fixEntityListCasesInplace,
getFolderLevels,
getParentFolder,
hasEmojiInText,
headersToRecord,
mkdirpInVault,
} from "./misc";
import { Cipher } from "./encryptUnified";
import { random } from "lodash";
export { Dropbox } from "dropbox";
@@ -34,10 +30,7 @@ export const DEFAULT_DROPBOX_CONFIG: DropboxConfig = {
credentialsShouldBeDeletedAtTime: 0,
};
export const getDropboxPath = (
fileOrFolderPath: string,
remoteBaseDir: string
) => {
const getDropboxPath = (fileOrFolderPath: string, remoteBaseDir: string) => {
let key = fileOrFolderPath;
if (fileOrFolderPath === "/" || fileOrFolderPath === "") {
// special
@@ -84,20 +77,22 @@ const fromDropboxItemToEntity = (
if (x[".tag"] === "folder") {
return {
key: key,
keyRaw: key,
size: 0,
sizeRaw: 0,
etag: `${x.id}\t`,
} as Entity;
} else if (x[".tag"] === "file") {
const mtimeCli = Date.parse(x.client_modified).valueOf();
const mtimeSvr = Date.parse(x.server_modified).valueOf();
return {
key: key,
keyRaw: key,
mtimeCli: mtimeCli,
mtimeSvr: mtimeSvr,
size: x.size,
sizeRaw: x.size,
hash: x.content_hash,
etag: `${x.id}\t${x.content_hash}`,
} as Entity;
} else {
// x[".tag"] === "deleted"
@@ -105,6 +100,132 @@
}
};
/**
* https://github.com/remotely-save/remotely-save/issues/567
* https://www.dropboxforum.com/t5/Dropbox-API-Support-Feedback/Case-Sensitivity-in-API-2/td-p/191279
* @param entities
*/
export const fixEntityListCasesInplace = (entities: { key?: string }[]) => {
for (const iterator of entities) {
if (iterator.key === undefined) {
throw Error(`dropbox list should all have key, but meet undefined`);
}
}
entities.sort((a, b) => a.key!.length - b.key!.length);
// console.log(JSON.stringify(entities,null,2));
const caseMapping: Record<string, string> = { "": "" };
for (const e of entities) {
// console.log(`looking for: ${JSON.stringify(e, null, 2)}`);
let parentFolder = getParentFolder(e.key!);
if (parentFolder === "/") {
parentFolder = "";
}
const parentFolderLower = parentFolder.toLocaleLowerCase();
const segs = e.key!.split("/");
if (e.key!.endsWith("/")) {
// folder
if (caseMapping.hasOwnProperty(parentFolderLower)) {
const newKey = `${caseMapping[parentFolderLower]}${segs
.slice(-2)
.join("/")}`;
caseMapping[newKey.toLocaleLowerCase()] = newKey;
e.key = newKey;
// console.log(JSON.stringify(caseMapping,null,2));
// continue;
} else {
throw Error(`${parentFolder} doesn't have cases record??`);
}
} else {
// file
if (caseMapping.hasOwnProperty(parentFolderLower)) {
const newKey = `${caseMapping[parentFolderLower]}${segs
.slice(-1)
.join("/")}`;
e.key = newKey;
// continue;
} else {
throw Error(`${parentFolder} doesn't have cases record??`);
}
}
}
return entities;
};
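// illustrative example (hypothetical paths): if Dropbox reports the folder as
// "AAA/" but a child as "aaa/bbb.md", sorting by key length records the
// canonical case of the parent first, so the child is fixed up to "AAA/bbb.md"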
////////////////////////////////////////////////////////////////////////////////
// Other usual common methods
////////////////////////////////////////////////////////////////////////////////
interface ErrSubType {
error: {
retry_after: number;
};
}
async function retryReq<T>(
reqFunc: () => Promise<DropboxResponse<T>>,
extraHint = ""
): Promise<DropboxResponse<T> | undefined> {
const waitSeconds = [1, 2, 4, 8]; // hard code exponential backoff
for (let idx = 0; idx < waitSeconds.length; ++idx) {
try {
if (idx !== 0) {
console.warn(
`${extraHint === "" ? "" : extraHint + ": "}The ${
idx + 1
}-th try starts at time ${Date.now()}`
);
}
return await reqFunc();
} catch (e: unknown) {
const err = e as DropboxResponseError<ErrSubType>;
if (err.status === undefined) {
// then the err is not DropboxResponseError
throw err;
}
if (err.status !== 429) {
// then the err is not "too many requests", give up
throw err;
}
if (idx === waitSeconds.length - 1) {
// the last retry also failed, give up
throw new Error(
`${
extraHint === "" ? "" : extraHint + ": "
}"429 too many requests", after retrying for ${
idx + 1
} times still failed.`
);
}
const headers = headersToRecord(err.headers);
const svrSec =
err.error.error.retry_after ||
Number.parseInt(headers["retry-after"] || "1") ||
1;
const fallbackSec = waitSeconds[idx];
const secMin = Math.max(svrSec, fallbackSec);
const secMax = Math.max(secMin * 1.8, 2);
console.warn(
`${
extraHint === "" ? "" : extraHint + ": "
}We have "429 too many requests" error of ${
idx + 1
}-th try, at time ${Date.now()}, and wait for ${secMin} ~ ${secMax} seconds to retry. Original info: ${JSON.stringify(
err.error,
null,
2
)}`
);
await delay(random(secMin * 1000, secMax * 1000));
}
}
}
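// illustrative call (hypothetical arguments): wrap any Dropbox SDK request so
// 429 throttling responses are retried with jittered exponential backoff, e.g.
//   const rsp = await retryReq(() => dbx.filesGetMetadata({ path }), "stat /a.md");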
////////////////////////////////////////////////////////////////////////////////
// Dropbox authorization using PKCE
// see https://dropbox.tech/developers/pkce--what-and-why-
@@ -112,7 +233,7 @@ const fromDropboxItemToEntity = (
export const getAuthUrlAndVerifier = async (
appKey: string,
needManualPatse: boolean = false
needManualPatse = false
) => {
const auth = new DropboxAuth({
clientId: appKey,
@@ -207,9 +328,9 @@ export const setConfigBySuccessfullAuthInplace = async (
console.info("start updating local info of Dropbox token");
config.accessToken = authRes.access_token;
config.accessTokenExpiresInSeconds = parseInt(authRes.expires_in);
config.accessTokenExpiresInSeconds = Number.parseInt(authRes.expires_in);
config.accessTokenExpiresAtTime =
Date.now() + parseInt(authRes.expires_in) * 1000 - 10 * 1000;
Date.now() + Number.parseInt(authRes.expires_in) * 1000 - 10 * 1000;
// manually set it expired after 80 days;
config.credentialsShouldBeDeletedAtTime =
@@ -228,94 +349,33 @@
};
////////////////////////////////////////////////////////////////////////////////
// Other usual common methods
// real exported interface
////////////////////////////////////////////////////////////////////////////////
interface ErrSubType {
error: {
retry_after: number;
};
}
async function retryReq<T>(
reqFunc: () => Promise<DropboxResponse<T>>,
extraHint: string = ""
): Promise<DropboxResponse<T> | undefined> {
const waitSeconds = [1, 2, 4, 8]; // hard code exponential backoff
for (let idx = 0; idx < waitSeconds.length; ++idx) {
try {
if (idx !== 0) {
console.warn(
`${extraHint === "" ? "" : extraHint + ": "}The ${
idx + 1
}-th try starts at time ${Date.now()}`
);
}
return await reqFunc();
} catch (e: unknown) {
const err = e as DropboxResponseError<ErrSubType>;
if (err.status === undefined) {
// then the err is not DropboxResponseError
throw err;
}
if (err.status !== 429) {
// then the err is not "too many requests", give up
throw err;
}
if (idx === waitSeconds.length - 1) {
// the last retry also failed, give up
throw new Error(
`${
extraHint === "" ? "" : extraHint + ": "
}"429 too many requests", after retrying for ${
idx + 1
} times still failed.`
);
}
const headers = headersToRecord(err.headers);
const svrSec =
err.error.error.retry_after ||
parseInt(headers["retry-after"] || "1") ||
1;
const fallbackSec = waitSeconds[idx];
const secMin = Math.max(svrSec, fallbackSec);
const secMax = Math.max(secMin * 1.8, 2);
console.warn(
`${
extraHint === "" ? "" : extraHint + ": "
}We have "429 too many requests" error of ${
idx + 1
}-th try, at time ${Date.now()}, and wait for ${secMin} ~ ${secMax} seconds to retry. Original info: ${JSON.stringify(
err.error,
null,
2
)}`
);
await delay(random(secMin * 1000, secMax * 1000));
}
}
}
export class WrappedDropboxClient {
export class FakeFsDropbox extends FakeFs {
kind: "dropbox";
dropboxConfig: DropboxConfig;
remoteBaseDir: string;
saveUpdatedConfigFunc: () => Promise<any>;
dropbox!: Dropbox;
vaultFolderExists: boolean;
foldersCreatedBefore: Set<string>;
constructor(
dropboxConfig: DropboxConfig,
remoteBaseDir: string,
vaultName: string,
saveUpdatedConfigFunc: () => Promise<any>
) {
super();
this.kind = "dropbox";
this.dropboxConfig = dropboxConfig;
this.remoteBaseDir = remoteBaseDir;
this.remoteBaseDir = this.dropboxConfig.remoteBaseDir || vaultName || "";
this.saveUpdatedConfigFunc = saveUpdatedConfigFunc;
this.vaultFolderExists = false;
this.foldersCreatedBefore = new Set();
}
init = async () => {
async _init() {
// check token
if (
this.dropboxConfig.accessToken === "" ||
@@ -388,389 +448,325 @@
}
}
return this.dropbox;
};
}
/**
* @param dropboxConfig
* @returns
*/
export const getDropboxClient = (
dropboxConfig: DropboxConfig,
remoteBaseDir: string,
saveUpdatedConfigFunc: () => Promise<any>
) => {
return new WrappedDropboxClient(
dropboxConfig,
remoteBaseDir,
saveUpdatedConfigFunc
);
};
export const getRemoteMeta = async (
client: WrappedDropboxClient,
remotePath: string
) => {
await client.init();
// if (remotePath === "" || remotePath === "/") {
// // filesGetMetadata doesn't support root folder
// // we instead try to list files
// // if no error occurs, we ensemble a fake result.
// const rsp = await retryReq(() =>
// client.dropbox.filesListFolder({
// path: `/${client.remoteBaseDir}`,
// recursive: false, // don't need to recursive here
// })
// );
// if (rsp.status !== 200) {
// throw Error(JSON.stringify(rsp));
// }
// return {
// key: remotePath,
// lastModified: undefined,
// size: 0,
// remoteType: "dropbox",
// etag: undefined,
// } as Entity;
// }
const rsp = await retryReq(() =>
client.dropbox.filesGetMetadata({
path: remotePath,
})
);
if (rsp === undefined) {
throw Error("dropbox.filesGetMetadata undefinded");
return this;
}
if (rsp.status !== 200) {
throw Error(JSON.stringify(rsp));
async walk(): Promise<Entity[]> {
return await this._walk(false);
}
return fromDropboxItemToEntity(rsp.result, client.remoteBaseDir);
};
export const uploadToRemote = async (
client: WrappedDropboxClient,
fileOrFolderPath: string,
vault: Vault | undefined,
isRecursively: boolean,
cipher: Cipher,
remoteEncryptedKey: string = "",
foldersCreatedBefore: Set<string> | undefined = undefined,
uploadRaw: boolean = false,
rawContent: string | ArrayBuffer = "",
rawContentMTime: number = 0,
rawContentCTime: number = 0
): Promise<UploadedType> => {
await client.init();
async walkPartial(): Promise<Entity[]> {
return await this._walk(true);
}
let uploadFile = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
if (remoteEncryptedKey === undefined || remoteEncryptedKey === "") {
throw Error(
`uploadToRemote(dropbox) you have password but remoteEncryptedKey is empty!`
async _walk(partial: boolean): Promise<Entity[]> {
await this._init();
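// Dropbox paginates listings: the first filesListFolder call returns one page,
// and, for a full walk, the has_more/cursor loop below fetches the rest via
// filesListFolderContinue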
let res = await this.dropbox.filesListFolder({
path: `/${this.remoteBaseDir}`,
recursive: !partial,
include_deleted: false,
limit: partial ? 10 : 1000,
});
if (res.status !== 200) {
throw Error(JSON.stringify(res));
}
// console.info(res);
const contents = res.result.entries;
const unifiedContents = contents
.filter((x) => x[".tag"] !== "deleted")
.filter((x) => x.path_display !== `/${this.remoteBaseDir}`)
.map((x) => fromDropboxItemToEntity(x, this.remoteBaseDir));
if (!partial) {
while (res.result.has_more) {
res = await this.dropbox.filesListFolderContinue({
cursor: res.result.cursor,
});
if (res.status !== 200) {
throw Error(JSON.stringify(res));
}
const contents2 = res.result.entries;
const unifiedContents2 = contents2
.filter((x) => x[".tag"] !== "deleted")
.filter((x) => x.path_display !== `/${this.remoteBaseDir}`)
.map((x) => fromDropboxItemToEntity(x, this.remoteBaseDir));
unifiedContents.push(...unifiedContents2);
}
}
fixEntityListCasesInplace(unifiedContents);
return unifiedContents;
}
async stat(key: string): Promise<Entity> {
await this._init();
return await this._statFromRoot(getDropboxPath(key, this.remoteBaseDir));
}
async _statFromRoot(key: string): Promise<Entity> {
// if (key === "" || key === "/") {
// // filesGetMetadata doesn't support root folder
// // we instead try to list files
// // if no error occurs, we ensemble a fake result.
// const rsp = await retryReq(() =>
// client.dropbox.filesListFolder({
// path: `/${client.key}`,
// recursive: false, // don't need to recursive here
// })
// );
// if (rsp.status !== 200) {
// throw Error(JSON.stringify(rsp));
// }
// return {
// key: remotePath,
// lastModified: undefined,
// size: 0,
// remoteType: "dropbox",
// etag: undefined,
// } as Entity;
// }
const rsp = await retryReq(() =>
this.dropbox.filesGetMetadata({
path: key,
})
);
if (rsp === undefined) {
throw Error("dropbox.filesGetMetadata undefinded");
}
if (rsp.status !== 200) {
throw Error(JSON.stringify(rsp));
}
return fromDropboxItemToEntity(rsp.result, this.remoteBaseDir);
}
async mkdir(key: string, mtime?: number, ctime?: number): Promise<Entity> {
if (!key.endsWith("/")) {
throw Error(`you should not call mkdir on ${key}`);
}
await this._init();
const uploadFile = getDropboxPath(key, this.remoteBaseDir);
return await this._mkdirFromRoot(uploadFile, mtime, ctime);
}
async _mkdirFromRoot(
key: string,
mtime?: number,
ctime?: number
): Promise<Entity> {
if (hasEmojiInText(key)) {
throw new Error(
`${key}: Error: Dropbox does not support emoji in file / folder names.`
);
}
uploadFile = remoteEncryptedKey;
if (this.foldersCreatedBefore?.has(key)) {
// created, pass
} else {
try {
await retryReq(
() =>
this.dropbox.filesCreateFolderV2({
path: key,
}),
key // just a hint
);
this.foldersCreatedBefore?.add(key);
} catch (e: unknown) {
const err = e as DropboxResponseError<files.CreateFolderError>;
if (err.status === undefined) {
throw err;
}
if (err.status === 409) {
// pass
this.foldersCreatedBefore?.add(key);
} else {
throw err;
}
}
}
return await this._statFromRoot(key);
}
uploadFile = getDropboxPath(uploadFile, client.remoteBaseDir);
if (hasEmojiInText(uploadFile)) {
throw new Error(
`${uploadFile}: Error: Dropbox does not support emoji in file / folder names.`
async writeFile(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity> {
if (key.endsWith("/")) {
throw Error(`you should not call writeFile on ${key}`);
}
await this._init();
const uploadFile = getDropboxPath(key, this.remoteBaseDir);
return await this._writeFileFromRoot(
uploadFile,
content,
mtime,
ctime,
key
);
}
async _writeFileFromRoot(
  key: string,
  content: ArrayBuffer,
  mtime: number,
  ctime: number,
  origKey: string
): Promise<Entity> {
  if (hasEmojiInText(origKey)) {
    throw new Error(
      `${origKey}: Error: Dropbox does not support emoji in file / folder names.`
    );
  }
  const mtimeFixed = Math.floor(mtime / 1000.0) * 1000;
  const ctimeFixed = Math.floor(ctime / 1000.0) * 1000;
  const mtimeStr = new Date(mtimeFixed)
    .toISOString()
    .replace(/\.\d{3}Z$/, "Z");
  // in dropbox, we don't need to create folders before uploading! cool!
  // TODO: filesUploadSession for larger files (>=150 MB)
  await retryReq(
    () =>
      this.dropbox.filesUpload({
        path: key,
        contents: content,
        mode: {
          ".tag": "overwrite",
        },
        client_modified: mtimeStr,
      }),
    origKey // hint
  );
  // we want to mark that parent folders are created
  if (this.foldersCreatedBefore !== undefined) {
    const dirs = getFolderLevels(origKey).map((x) =>
      getDropboxPath(x, this.remoteBaseDir)
    );
    for (const dir of dirs) {
      this.foldersCreatedBefore?.add(dir);
    }
  }
  return await this._statFromRoot(key);
}
async readFile(key: string): Promise<ArrayBuffer> {
  await this._init();
  if (key.endsWith("/")) {
    throw new Error(`you should not call readFile on folder ${key}`);
  }
  const downloadFile = getDropboxPath(key, this.remoteBaseDir);
  return await this._readFileFromRoot(downloadFile);
}
async _readFileFromRoot(key: string): Promise<ArrayBuffer> {
  const rsp = await retryReq(
    () =>
      this.dropbox.filesDownload({
        path: key,
      }),
    `downloadFromRemoteRaw=${key}`
  );
  if (rsp === undefined) {
    throw Error(`unknown rsp from dropbox download: ${rsp}`);
  }
  if ((rsp.result as any).fileBlob !== undefined) {
    // we get a Blob
    const content = (rsp.result as any).fileBlob as Blob;
    return await content.arrayBuffer();
  } else if ((rsp.result as any).fileBinary !== undefined) {
    // we get a Buffer
    const content = (rsp.result as any).fileBinary as Buffer;
    return bufferToArrayBuffer(content);
  } else {
    throw Error(`unknown rsp from dropbox download: ${rsp}`);
  }
}
async rename(key1: string, key2: string): Promise<void> {
const remoteFileName1 = getDropboxPath(key1, this.remoteBaseDir);
const remoteFileName2 = getDropboxPath(key2, this.remoteBaseDir);
await this._init();
try {
await retryReq(
() =>
this.dropbox.filesMoveV2({
from_path: remoteFileName1,
to_path: remoteFileName2,
}),
`${key1}=>${key2}` // just a hint here
);
} catch (err) {
console.error("some error while moving");
console.error(err);
}
}
async rm(key: string): Promise<void> {
if (key === "/") {
return;
}
const remoteFileName = getDropboxPath(key, this.remoteBaseDir);
await this._init();
try {
await retryReq(
() =>
this.dropbox.filesDeleteV2({
path: remoteFileName,
}),
key // just a hint here
);
} catch (err) {
console.error("some error while deleting");
console.error(err);
}
}
async checkConnect(callbackFunc?: any): Promise<boolean> {
try {
await this._init();
const results = await this._statFromRoot(`/${this.remoteBaseDir}`);
if (results === undefined) {
return false;
}
return true;
} catch (err) {
console.debug(err);
callbackFunc?.(err);
return false;
}
}
async getUserDisplayName() {
await this._init();
const acct = await this.dropbox.usersGetCurrentAccount();
return acct.result.name.display_name;
}
async revokeAuth() {
try {
await this._init();
await this.dropbox.authTokenRevoke();
return true;
} catch (e) {
return false;
}
}
allowEmptyFile(): boolean {
return true;
}
}
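The class above exposes the same FakeFs surface (walk / stat / mkdir / writeFile / readFile / rename / rm / checkConnect) as every other backend, so the sync engine never has to touch the Dropbox SDK directly. A minimal usage sketch, assuming a filled-in Dropbox config and a settings-saving callback as wired up in src/fsGetter.ts below:

const dropboxFs = new FakeFsDropbox(settings.dropbox, vaultName, saveUpdatedConfigFunc);
if (await dropboxFs.checkConnect(console.error)) {
  const entities = await dropboxFs.walk(); // everything under /<remoteBaseDir>
  const buf = await dropboxFs.readFile("notes/hello.md");
  await dropboxFs.writeFile("notes/hello-copy.md", buf, Date.now(), Date.now());
}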

580
src/fsEncrypt.ts Normal file

@ -0,0 +1,580 @@
import type { CipherMethodType, Entity } from "./baseTypes";
import * as openssl from "./encryptOpenSSL";
import * as rclone from "./encryptRClone";
import { isVaildText } from "./misc";
import cloneDeep from "lodash/cloneDeep";
import { FakeFs } from "./fsAll";
/**
* quick guess, no actual decryption here
* @param name
* @returns
*/
function isLikelyOpenSSLEncryptedName(name: string): boolean {
if (
name.startsWith(openssl.MAGIC_ENCRYPTED_PREFIX_BASE32) ||
name.startsWith(openssl.MAGIC_ENCRYPTED_PREFIX_BASE64URL)
) {
return true;
}
return false;
}
/**
* quick guess, no actual decryption here
* @param name
* @returns
*/
function isLikelyEncryptedName(name: string): boolean {
return isLikelyOpenSSLEncryptedName(name);
}
/**
* quick guess, no actual decryption here, only openssl can be guessed here
* @param name
* @returns
*/
function isLikelyEncryptedNameNotMatchMethod(
name: string,
method: CipherMethodType
): boolean {
if (isLikelyOpenSSLEncryptedName(name) && method !== "openssl-base64") {
return true;
}
if (!isLikelyOpenSSLEncryptedName(name) && method === "openssl-base64") {
return true;
}
return false;
}
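// For example (a sketch; the actual prefix constants live in ./encryptOpenSSL):
// a remote name that starts with neither magic prefix, checked against
// method === "openssl-base64", returns true here, and isPasswordOk() below
// then reports "encryption_method_not_matched".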
export interface PasswordCheckType {
ok: boolean;
reason:
| "empty_remote"
| "unknown_encryption_method"
| "remote_encrypted_local_no_password"
| "password_matched"
| "password_or_method_not_matched_or_remote_not_encrypted"
| "likely_no_password_both_sides"
| "encryption_method_not_matched";
}
/**
* Useful if isPasswordEmpty()
*/
function copyEntityAndCopyKeyEncSizeEnc(entity: Entity) {
const res = cloneDeep(entity);
res["keyEnc"] = res["keyRaw"];
res["sizeEnc"] = res["sizeRaw"];
return res;
}
export class FakeFsEncrypt extends FakeFs {
innerFs: FakeFs;
readonly password: string;
readonly method: CipherMethodType;
cipherRClone?: rclone.CipherRclone;
cacheMapOrigToEnc: Record<string, string>;
hasCacheMap: boolean;
kind: string;
constructor(innerFs: FakeFs, password: string, method: CipherMethodType) {
super();
this.innerFs = innerFs;
this.password = password ?? "";
this.method = method;
this.cacheMapOrigToEnc = {};
this.hasCacheMap = false;
this.kind = `encrypt(${this.innerFs.kind},${
this.password !== "" ? method : "no password"
})`;
if (method === "rclone-base64") {
this.cipherRClone = new rclone.CipherRclone(password, 5);
}
}
isPasswordEmpty() {
return this.password === "";
}
isFolderAware() {
if (this.method === "openssl-base64") {
return false;
}
if (this.method === "rclone-base64") {
return true;
}
throw Error(`no idea about isFolderAware for method=${this.method}`);
}
async isPasswordOk(): Promise<PasswordCheckType> {
const innerWalkResult = await this.walkPartial();
if (innerWalkResult === undefined || innerWalkResult.length === 0) {
// remote empty
return {
ok: true,
reason: "empty_remote",
};
}
const sanityCheckKey = innerWalkResult[0].keyRaw;
if (this.isPasswordEmpty()) {
// TODO: no way to distinguish remote rclone encrypted
// if local has no password??
if (isLikelyEncryptedName(sanityCheckKey)) {
return {
ok: false,
reason: "remote_encrypted_local_no_password",
};
} else {
return {
ok: true,
reason: "likely_no_password_both_sides",
};
}
} else {
if (this.method === "unknown") {
return {
ok: false,
reason: "unknown_encryption_method",
};
}
if (isLikelyEncryptedNameNotMatchMethod(sanityCheckKey, this.method)) {
return {
ok: false,
reason: "encryption_method_not_matched",
};
}
try {
const k = await this._decryptName(sanityCheckKey);
if (k === undefined) {
throw Error(`decryption failed`);
}
return {
ok: true,
reason: "password_matched",
};
} catch (error) {
return {
ok: false,
reason: "password_or_method_not_matched_or_remote_not_encrypted",
};
}
}
}
async walk(): Promise<Entity[]> {
const innerWalkResult = await this.innerFs.walk();
return await this._dealWithWalk(innerWalkResult);
}
async walkPartial(): Promise<Entity[]> {
const innerWalkResult = await this.innerFs.walkPartial();
return await this._dealWithWalk(innerWalkResult);
}
async _dealWithWalk(innerWalkResult: Entity[]): Promise<Entity[]> {
const res: Entity[] = [];
if (this.isPasswordEmpty()) {
for (const innerEntity of innerWalkResult) {
res.push(copyEntityAndCopyKeyEncSizeEnc(innerEntity));
this.cacheMapOrigToEnc[innerEntity.key!] = innerEntity.key!;
}
this.hasCacheMap = true;
return res;
} else {
for (const innerEntity of innerWalkResult) {
const key = await this._decryptName(innerEntity.keyRaw);
const size = key.endsWith("/") ? 0 : undefined;
res.push({
key: key,
keyRaw: innerEntity.keyRaw,
keyEnc: innerEntity.key!,
mtimeCli: innerEntity.mtimeCli,
mtimeSvr: innerEntity.mtimeSvr,
size: size,
sizeEnc: innerEntity.size!,
sizeRaw: innerEntity.sizeRaw,
hash: undefined,
synthesizedFolder: innerEntity.synthesizedFolder,
});
this.cacheMapOrigToEnc[key] = innerEntity.keyRaw;
}
this.hasCacheMap = true;
return res;
}
}
async stat(key: string): Promise<Entity> {
if (!this.hasCacheMap) {
throw new Error("You have to build the cacheMap firstly for stat");
}
const keyEnc = this.cacheMapOrigToEnc[key];
if (keyEnc === undefined) {
throw new Error(`no encrypted key ${key} before!`);
}
const innerEntity = await this.innerFs.stat(keyEnc);
if (this.isPasswordEmpty()) {
return copyEntityAndCopyKeyEncSizeEnc(innerEntity);
} else {
return {
key: key,
keyRaw: innerEntity.keyRaw,
keyEnc: innerEntity.key!,
mtimeCli: innerEntity.mtimeCli,
mtimeSvr: innerEntity.mtimeSvr,
size: undefined,
sizeEnc: innerEntity.size!,
sizeRaw: innerEntity.sizeRaw,
hash: undefined,
synthesizedFolder: innerEntity.synthesizedFolder,
};
}
}
async mkdir(key: string, mtime?: number, ctime?: number): Promise<Entity> {
if (!this.hasCacheMap) {
throw new Error("You have to build the cacheMap firstly for mkdir");
}
if (!key.endsWith("/")) {
throw new Error(`should not call mkdir on ${key}`);
}
let keyEnc = this.cacheMapOrigToEnc[key];
if (keyEnc === undefined) {
if (this.isPasswordEmpty()) {
keyEnc = key;
} else {
keyEnc = await this._encryptName(key);
}
this.cacheMapOrigToEnc[key] = keyEnc;
}
if (this.isPasswordEmpty() || this.isFolderAware()) {
const innerEntity = await this.innerFs.mkdir(keyEnc, mtime, ctime);
return copyEntityAndCopyKeyEncSizeEnc(innerEntity);
} else {
const now = Date.now();
let content = new ArrayBuffer(0);
if (!this.innerFs.allowEmptyFile()) {
content = new ArrayBuffer(1);
}
const innerEntity = await this.innerFs.writeFile(
keyEnc,
content,
mtime ?? now,
ctime ?? now
);
return {
key: key,
keyRaw: innerEntity.keyRaw,
keyEnc: innerEntity.key!,
mtimeCli: innerEntity.mtimeCli,
mtimeSvr: innerEntity.mtimeSvr,
size: 0,
sizeEnc: innerEntity.size!,
sizeRaw: innerEntity.sizeRaw,
hash: undefined,
synthesizedFolder: innerEntity.synthesizedFolder,
};
}
}
async writeFile(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity> {
if (!this.hasCacheMap) {
throw new Error("You have to build the cacheMap firstly for readFile");
}
let keyEnc = this.cacheMapOrigToEnc[key];
if (keyEnc === undefined) {
if (this.isPasswordEmpty()) {
keyEnc = key;
} else {
keyEnc = await this._encryptName(key);
}
this.cacheMapOrigToEnc[key] = keyEnc;
}
if (this.isPasswordEmpty()) {
const innerEntity = await this.innerFs.writeFile(
keyEnc,
content,
mtime,
ctime
);
return copyEntityAndCopyKeyEncSizeEnc(innerEntity);
} else {
const contentEnc = await this._encryptContent(content);
const innerEntity = await this.innerFs.writeFile(
keyEnc,
contentEnc,
mtime,
ctime
);
return {
key: key,
keyRaw: innerEntity.keyRaw,
keyEnc: innerEntity.key!,
mtimeCli: innerEntity.mtimeCli,
mtimeSvr: innerEntity.mtimeSvr,
size: undefined,
sizeEnc: innerEntity.size!,
sizeRaw: innerEntity.sizeRaw,
hash: undefined,
synthesizedFolder: innerEntity.synthesizedFolder,
};
}
}
async readFile(key: string): Promise<ArrayBuffer> {
if (!this.hasCacheMap) {
throw new Error("You have to build the cacheMap firstly for readFile");
}
const keyEnc = this.cacheMapOrigToEnc[key];
if (keyEnc === undefined) {
throw new Error(`no encrypted key ${key} before! cannot readFile`);
}
const contentEnc = await this.innerFs.readFile(keyEnc);
if (this.isPasswordEmpty()) {
return contentEnc;
} else {
const res = await this._decryptContent(contentEnc);
return res;
}
}
async rename(key1: string, key2: string): Promise<void> {
if (!this.hasCacheMap) {
throw new Error("You have to build the cacheMap firstly for readFile");
}
let key1Enc = this.cacheMapOrigToEnc[key1];
if (key1Enc === undefined) {
if (this.isPasswordEmpty()) {
key1Enc = key1;
} else {
key1Enc = await this._encryptName(key1);
}
this.cacheMapOrigToEnc[key1] = key1Enc;
}
let key2Enc = this.cacheMapOrigToEnc[key2];
if (key2Enc === undefined) {
if (this.isPasswordEmpty()) {
key2Enc = key2;
} else {
key2Enc = await this._encryptName(key2);
}
this.cacheMapOrigToEnc[key2] = key2Enc;
}
return await this.innerFs.rename(key1Enc, key2Enc);
}
async rm(key: string): Promise<void> {
if (!this.hasCacheMap) {
throw new Error("You have to build the cacheMap firstly for rm");
}
const keyEnc = this.cacheMapOrigToEnc[key];
if (keyEnc === undefined) {
throw new Error(`no encrypted key ${key} before! cannot rm`);
}
return await this.innerFs.rm(keyEnc);
}
async checkConnect(callbackFunc?: any): Promise<boolean> {
return await this.innerFs.checkConnect(callbackFunc);
}
async closeResources() {
if (this.method === "rclone-base64" && this.cipherRClone !== undefined) {
this.cipherRClone.closeResources();
}
}
async encryptEntity(input: Entity): Promise<Entity> {
if (input.key === undefined) {
// input.key should always have value
throw Error(`input ${input.keyRaw} is abnormal without key`);
}
if (this.isPasswordEmpty()) {
return copyEntityAndCopyKeyEncSizeEnc(input);
}
// below is for having password
const local = cloneDeep(input);
if (local.sizeEnc === undefined && local.size !== undefined) {
// it's not filled yet, we fill it
// local.size is possibly undefined if it's "prevSync" Entity
// but local.key should always have value
local.sizeEnc = this._getSizeFromOrigToEnc(local.size);
}
if (local.keyEnc === undefined || local.keyEnc === "") {
let keyEnc = this.cacheMapOrigToEnc[input.key];
if (keyEnc !== undefined && keyEnc !== "" && keyEnc !== local.key) {
// we can reuse remote encrypted key if any
local.keyEnc = keyEnc;
} else {
// we assign a new encrypted key because of no remote
keyEnc = await this._encryptName(input.key);
local.keyEnc = keyEnc;
// remember to add back to cache!
this.cacheMapOrigToEnc[input.key] = keyEnc;
}
}
return local;
}
async _encryptContent(content: ArrayBuffer) {
// console.debug("start encryptContent");
if (this.password === "") {
return content;
}
if (this.method === "openssl-base64") {
const res = await openssl.encryptArrayBuffer(content, this.password);
if (res === undefined) {
throw Error(`cannot encrypt content`);
}
return res;
} else if (this.method === "rclone-base64") {
const res =
await this.cipherRClone!.encryptContentByCallingWorker(content);
if (res === undefined) {
throw Error(`cannot encrypt content`);
}
return res;
} else {
throw Error(`not supported encrypt method=${this.method}`);
}
}
async _decryptContent(content: ArrayBuffer) {
// console.debug("start decryptContent");
if (this.password === "") {
return content;
}
if (this.method === "openssl-base64") {
const res = await openssl.decryptArrayBuffer(content, this.password);
if (res === undefined) {
throw Error(`cannot decrypt content`);
}
return res;
} else if (this.method === "rclone-base64") {
const res =
await this.cipherRClone!.decryptContentByCallingWorker(content);
if (res === undefined) {
throw Error(`cannot decrypt content`);
}
return res;
} else {
throw Error(`not supported decrypt method=${this.method}`);
}
}
async _encryptName(name: string) {
// console.debug("start encryptName");
if (this.password === "") {
return name;
}
if (this.method === "openssl-base64") {
const res = await openssl.encryptStringToBase64url(name, this.password);
if (res === undefined) {
throw Error(`cannot encrypt name=${name}`);
}
return res;
} else if (this.method === "rclone-base64") {
const res = await this.cipherRClone!.encryptNameByCallingWorker(name);
if (res === undefined) {
throw Error(`cannot encrypt name=${name}`);
}
return res;
} else {
throw Error(`not supported encrypt method=${this.method}`);
}
}
async _decryptName(name: string): Promise<string> {
// console.debug("start decryptName");
if (this.password === "") {
return name;
}
if (this.method === "openssl-base64") {
if (name.startsWith(openssl.MAGIC_ENCRYPTED_PREFIX_BASE32)) {
// backward compatible with the openssl-base32
try {
const res = await openssl.decryptBase32ToString(name, this.password);
if (res !== undefined && isVaildText(res)) {
return res;
} else {
throw Error(`cannot decrypt name=${name}`);
}
} catch (error) {
throw Error(`cannot decrypt name=${name}`);
}
} else if (name.startsWith(openssl.MAGIC_ENCRYPTED_PREFIX_BASE64URL)) {
try {
const res = await openssl.decryptBase64urlToString(
name,
this.password
);
if (res !== undefined && isVaildText(res)) {
return res;
} else {
throw Error(`cannot decrypt name=${name}`);
}
} catch (error) {
throw Error(`cannot decrypt name=${name}`);
}
} else {
throw Error(
`method=${this.method} but the name=${name}, likely mismatch`
);
}
} else if (this.method === "rclone-base64") {
const res = await this.cipherRClone!.decryptNameByCallingWorker(name);
if (res === undefined) {
throw Error(`cannot decrypt name=${name}`);
}
return res;
} else {
throw Error(`not supported decrypt method=${this.method}`);
}
}
_getSizeFromOrigToEnc(x: number) {
if (this.password === "") {
return x;
}
if (this.method === "openssl-base64") {
return openssl.getSizeFromOrigToEnc(x);
} else if (this.method === "rclone-base64") {
return rclone.getSizeFromOrigToEnc(x);
} else {
throw Error(`not supported encrypt method=${this.method}`);
}
}
async getUserDisplayName(): Promise<string> {
return await this.innerFs.getUserDisplayName();
}
async revokeAuth(): Promise<any> {
return await this.innerFs.revokeAuth();
}
allowEmptyFile(): boolean {
return true;
}
}
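FakeFsEncrypt is thus a transparent layer: callers keep using plain vault paths, while names and contents are encrypted on their way into the inner backend and decrypted on the way out. A rough usage sketch (innerFs, the password, and someContent are placeholders):

const enc = new FakeFsEncrypt(innerFs, "my-password", "rclone-base64");
const check = await enc.isPasswordOk();
if (!check.ok) {
  throw Error(`password check failed: ${check.reason}`);
}
await enc.walk(); // also builds the orig -> encrypted name cache
await enc.writeFile("diary.md", someContent, Date.now(), Date.now());
const plain = await enc.readFile("diary.md"); // decrypted bytes
await enc.closeResources(); // stops the rclone worker, if one was started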

54
src/fsGetter.ts Normal file

@ -0,0 +1,54 @@
import { FakeFsGoogleDrive } from "../pro/src/fsGoogleDrive";
import type { RemotelySavePluginSettings } from "./baseTypes";
import type { FakeFs } from "./fsAll";
import { FakeFsDropbox } from "./fsDropbox";
import { FakeFsOnedrive } from "./fsOnedrive";
import { FakeFsS3 } from "./fsS3";
import { FakeFsWebdav } from "./fsWebdav";
import { FakeFsWebdis } from "./fsWebdis";
/**
* To avoid circular dependency, we need a new file here.
*/
export function getClient(
settings: RemotelySavePluginSettings,
vaultName: string,
saveUpdatedConfigFunc: () => Promise<any>
): FakeFs {
switch (settings.serviceType) {
case "s3":
return new FakeFsS3(settings.s3);
case "webdav":
return new FakeFsWebdav(
settings.webdav,
vaultName,
saveUpdatedConfigFunc
);
case "dropbox":
return new FakeFsDropbox(
settings.dropbox,
vaultName,
saveUpdatedConfigFunc
);
case "onedrive":
return new FakeFsOnedrive(
settings.onedrive,
vaultName,
saveUpdatedConfigFunc
);
case "webdis":
return new FakeFsWebdis(
settings.webdis,
vaultName,
saveUpdatedConfigFunc
);
case "googledrive":
return new FakeFsGoogleDrive(
settings.googledrive,
vaultName,
saveUpdatedConfigFunc
);
default:
throw Error(`cannot init client for serviceType=${settings.serviceType}`);
}
}
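A usage sketch (the plugin wiring shown here is an assumption based on typical Obsidian plugin code, not part of this file):

const fs: FakeFs = getClient(
  plugin.settings,
  plugin.app.vault.getName(),
  () => plugin.saveSettings()
);
console.info(`sync backend: ${fs.kind}`);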

186
src/fsLocal.ts Normal file

@ -0,0 +1,186 @@
import { DEFAULT_DEBUG_FOLDER, type Entity } from "./baseTypes";
import { FakeFs } from "./fsAll";
import { TFile, TFolder, type Vault } from "obsidian";
import { mkdirpInVault, statFix, unixTimeToStr } from "./misc";
import { listFilesInObsFolder } from "./obsFolderLister";
import type { Profiler } from "./profiler";
export class FakeFsLocal extends FakeFs {
vault: Vault;
syncConfigDir: boolean;
configDir: string;
pluginID: string;
profiler: Profiler | undefined;
deleteToWhere: "obsidian" | "system";
kind: "local";
constructor(
vault: Vault,
syncConfigDir: boolean,
configDir: string,
pluginID: string,
profiler: Profiler | undefined,
deleteToWhere: "obsidian" | "system"
) {
super();
this.vault = vault;
this.syncConfigDir = syncConfigDir;
this.configDir = configDir;
this.pluginID = pluginID;
this.profiler = profiler;
this.deleteToWhere = deleteToWhere;
this.kind = "local";
}
async walk(): Promise<Entity[]> {
this.profiler?.addIndent();
this.profiler?.insert("enter walk for local");
const local: Entity[] = [];
const localTAbstractFiles = this.vault.getAllLoadedFiles();
this.profiler?.insert("finish getting walk for local");
for (const entry of localTAbstractFiles) {
let r: Entity | undefined = undefined;
let key = entry.path;
if (entry.path === "/") {
// ignore
continue;
} else if (entry instanceof TFile) {
let mtimeLocal: number | undefined = entry.stat.mtime;
if (mtimeLocal <= 0) {
mtimeLocal = entry.stat.ctime;
}
if (mtimeLocal === 0) {
mtimeLocal = undefined;
}
if (mtimeLocal === undefined) {
throw Error(
`Your file has last modified time 0: ${key}; don't know how to deal with it`
);
}
r = {
key: entry.path, // local always unencrypted
keyRaw: entry.path,
mtimeCli: mtimeLocal,
mtimeSvr: mtimeLocal,
size: entry.stat.size, // local always unencrypted
sizeRaw: entry.stat.size,
};
} else if (entry instanceof TFolder) {
key = `${entry.path}/`;
r = {
key: key,
keyRaw: key,
size: 0,
sizeRaw: 0,
};
} else {
throw Error(`unexpected ${entry}`);
}
if (r.keyRaw.startsWith(DEFAULT_DEBUG_FOLDER)) {
// skip listing the debug folder,
// which should never be involved in sync
// continue;
} else {
local.push(r);
}
}
this.profiler?.insert("finish transforming walk for local");
if (this.syncConfigDir) {
this.profiler?.insert("into syncConfigDir");
const syncFiles = await listFilesInObsFolder(
this.configDir,
this.vault,
this.pluginID
);
for (const f of syncFiles) {
local.push(f);
}
this.profiler?.insert("finish syncConfigDir");
}
this.profiler?.insert("finish walk for local");
this.profiler?.removeIndent();
return local;
}
async walkPartial(): Promise<Entity[]> {
return await this.walk();
}
async stat(key: string): Promise<Entity> {
const statRes = await statFix(this.vault, key);
if (statRes === undefined || statRes === null) {
throw Error(`${key} does not exist! cannot stat for local`);
}
const isFolder = statRes.type === "folder";
return {
key: isFolder ? `${key}/` : key, // local always unencrypted
keyRaw: isFolder ? `${key}/` : key,
mtimeCli: statRes.mtime,
mtimeSvr: statRes.mtime,
mtimeCliFmt: unixTimeToStr(statRes.mtime),
mtimeSvrFmt: unixTimeToStr(statRes.mtime),
size: statRes.size, // local always unencrypted
sizeRaw: statRes.size,
};
}
async mkdir(key: string, mtime?: number, ctime?: number): Promise<Entity> {
// console.debug(`mkdir: ${key}`);
await mkdirpInVault(key, this.vault);
return await this.stat(key);
}
async writeFile(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity> {
await this.vault.adapter.writeBinary(key, content, {
mtime: mtime,
ctime: ctime,
});
return await this.stat(key);
}
async readFile(key: string): Promise<ArrayBuffer> {
return await this.vault.adapter.readBinary(key);
}
async rename(key1: string, key2: string): Promise<void> {
return await this.vault.adapter.rename(key1, key2);
}
async rm(key: string): Promise<void> {
if (this.deleteToWhere === "obsidian") {
await this.vault.adapter.trashLocal(key);
} else {
// "system"
if (!(await this.vault.adapter.trashSystem(key))) {
await this.vault.adapter.trashLocal(key);
}
}
}
async checkConnect(callbackFunc?: any): Promise<boolean> {
return true;
}
async getUserDisplayName(): Promise<string> {
throw new Error("Method not implemented.");
}
async revokeAuth(): Promise<any> {
throw new Error("Method not implemented.");
}
allowEmptyFile(): boolean {
return true;
}
}
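On the local side, walk() returns Entity records whose key/keyRaw and size/sizeRaw pairs are identical, since local files are never encrypted. A construction sketch (the exact settings field names are assumptions):

const localFs = new FakeFsLocal(
  app.vault,
  settings.syncConfigDir,
  app.vault.configDir,
  plugin.manifest.id,
  undefined, // no profiler
  "system" // deleteToWhere: "obsidian" | "system"
);
for (const entity of await localFs.walk()) {
  console.debug(`${entity.key}: ${entity.sizeRaw} bytes`);
}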

64
src/fsMock.ts Normal file

@ -0,0 +1,64 @@
import type { Entity } from "./baseTypes";
import { FakeFs } from "./fsAll";
export class FakeFsMock extends FakeFs {
kind: "mock";
constructor() {
super();
this.kind = "mock";
}
async walk(): Promise<Entity[]> {
throw new Error("Method not implemented.");
}
async walkPartial(): Promise<Entity[]> {
return await this.walk();
}
async stat(key: string): Promise<Entity> {
throw new Error("Method not implemented.");
}
async mkdir(key: string, mtime: number, ctime: number): Promise<Entity> {
throw new Error("Method not implemented.");
}
async writeFile(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity> {
throw new Error("Method not implemented.");
}
async readFile(key: string): Promise<ArrayBuffer> {
throw new Error("Method not implemented.");
}
async rename(key1: string, key2: string): Promise<void> {
throw new Error("Method not implemented.");
}
async rm(key: string): Promise<void> {
throw new Error("Method not implemented.");
}
async checkConnect(callbackFunc?: any): Promise<boolean> {
return true;
}
async getUserDisplayName(): Promise<string> {
throw new Error("Method not implemented.");
}
async revokeAuth(): Promise<any> {
throw new Error("Method not implemented.");
}
allowEmptyFile(): boolean {
throw new Error("Method not implemented.");
}
}

src/fsOnedrive.ts

@ -1,5 +1,5 @@
import { CryptoProvider, PublicClientApplication } from "@azure/msal-node";
import { AuthenticationProvider } from "@microsoft/microsoft-graph-client";
import type { AuthenticationProvider } from "@microsoft/microsoft-graph-client";
import type {
DriveItem,
FileSystemInfo,
@ -7,23 +7,17 @@ import type {
User,
} from "@microsoft/microsoft-graph-types";
import cloneDeep from "lodash/cloneDeep";
import { request, requestUrl, requireApiVersion, Vault } from "obsidian";
import { request, requestUrl } from "obsidian";
import {
VALID_REQURL,
COMMAND_CALLBACK_ONEDRIVE,
DEFAULT_CONTENT_TYPE,
type Entity,
OAUTH2_FORCE_EXPIRE_MILLISECONDS,
OnedriveConfig,
Entity,
UploadedType,
type OnedriveConfig,
} from "./baseTypes";
import {
bufferToArrayBuffer,
getRandomArrayBuffer,
getRandomIntInclusive,
mkdirpInVault,
} from "./misc";
import { Cipher } from "./encryptUnified";
import { VALID_REQURL } from "./baseTypesObs";
import { FakeFs } from "./fsAll";
import { bufferToArrayBuffer } from "./misc";
const SCOPES = ["User.Read", "Files.ReadWrite.AppFolder", "offline_access"];
const REDIRECT_URI = `obsidian://${COMMAND_CALLBACK_ONEDRIVE}`;
@ -38,6 +32,7 @@ export const DEFAULT_ONEDRIVE_CONFIG: OnedriveConfig = {
deltaLink: "",
username: "",
credentialsShouldBeDeletedAtTime: 0,
emptyFile: "skip",
};
////////////////////////////////////////////////////////////////////////////////
@ -237,23 +232,6 @@ const getOnedrivePath = (fileOrFolderPath: string, remoteBaseDir: string) => {
return key;
};
const getNormPath = (fileOrFolderPath: string, remoteBaseDir: string) => {
const prefix = `/drive/special/approot:/${remoteBaseDir}`;
if (
!(fileOrFolderPath === prefix || fileOrFolderPath.startsWith(`${prefix}/`))
) {
throw Error(
`"${fileOrFolderPath}" doesn't starts with "${prefix}/" or equals to "${prefix}"`
);
}
if (fileOrFolderPath === prefix) {
return "/";
}
return fileOrFolderPath.slice(`${prefix}/`.length);
};
const constructFromDriveItemToEntityError = (x: DriveItem) => {
return `parentPath="${
x.parentReference?.path ?? "(no parentReference or path)"
@ -361,15 +339,21 @@ const fromDriveItemToEntity = (x: DriveItem, remoteBaseDir: string): Entity => {
const mtimeSvr = Date.parse(x?.fileSystemInfo!.lastModifiedDateTime!);
const mtimeCli = Date.parse(x?.fileSystemInfo!.lastModifiedDateTime!);
return {
key: key,
keyRaw: key,
mtimeSvr: mtimeSvr,
mtimeCli: mtimeCli,
size: isFolder ? 0 : x.size!,
sizeRaw: isFolder ? 0 : x.size!,
synthesizedFile: false,
// hash: ?? // TODO
etag: x.cTag || "", // do NOT use x.eTag because it changes if meta changes
};
};
////////////////////////////////////////////////////////////////////////////////
// The client.
////////////////////////////////////////////////////////////////////////////////
// to adapt to the required interface
class MyAuthProvider implements AuthenticationProvider {
onedriveConfig: OnedriveConfig;
@ -381,7 +365,8 @@ class MyAuthProvider implements AuthenticationProvider {
this.onedriveConfig = onedriveConfig;
this.saveUpdatedConfigFunc = saveUpdatedConfigFunc;
}
getAccessToken = async () => {
async getAccessToken() {
if (
this.onedriveConfig.accessToken === "" ||
this.onedriveConfig.refreshToken === ""
@ -415,7 +400,7 @@ class MyAuthProvider implements AuthenticationProvider {
console.info("Onedrive accessToken updated");
return this.onedriveConfig.accessToken;
}
};
}
}
/**
@ -431,25 +416,31 @@ export const getShrinkedSettings = (onedriveConfig: OnedriveConfig) => {
return config;
};
export class WrappedOnedriveClient {
export class FakeFsOnedrive extends FakeFs {
kind: "onedrive";
onedriveConfig: OnedriveConfig;
remoteBaseDir: string;
vaultFolderExists: boolean;
authGetter: MyAuthProvider;
saveUpdatedConfigFunc: () => Promise<any>;
foldersCreatedBefore: Set<string>;
constructor(
onedriveConfig: OnedriveConfig,
remoteBaseDir: string,
vaultName: string,
saveUpdatedConfigFunc: () => Promise<any>
) {
super();
this.kind = "onedrive";
this.onedriveConfig = onedriveConfig;
this.remoteBaseDir = remoteBaseDir;
this.remoteBaseDir = this.onedriveConfig.remoteBaseDir || vaultName || "";
this.vaultFolderExists = false;
this.saveUpdatedConfigFunc = saveUpdatedConfigFunc;
this.authGetter = new MyAuthProvider(onedriveConfig, saveUpdatedConfigFunc);
this.foldersCreatedBefore = new Set();
}
init = async () => {
async _init() {
// check token
if (
this.onedriveConfig.accessToken === "" ||
@ -463,14 +454,14 @@ export class WrappedOnedriveClient {
if (this.vaultFolderExists) {
// console.info(`already checked, /${this.remoteBaseDir} exist before`)
} else {
const k = await this.getJson("/drive/special/approot/children");
const k = await this._getJson("/drive/special/approot/children");
// console.debug(k);
this.vaultFolderExists =
(k.value as DriveItem[]).filter((x) => x.name === this.remoteBaseDir)
.length > 0;
if (!this.vaultFolderExists) {
console.info(`remote does not have folder /${this.remoteBaseDir}`);
await this.postJson("/drive/special/approot/children", {
await this._postJson("/drive/special/approot/children", {
name: `${this.remoteBaseDir}`,
folder: {},
"@microsoft.graph.conflictBehavior": "replace",
@ -481,9 +472,9 @@ export class WrappedOnedriveClient {
// console.info(`remote folder /${this.remoteBaseDir} exists`);
}
}
};
}
buildUrl = (pathFragOrig: string) => {
_buildUrl(pathFragOrig: string) {
const API_PREFIX = "https://graph.microsoft.com/v1.0";
let theUrl = "";
if (
@ -501,10 +492,10 @@ export class WrappedOnedriveClient {
theUrl = theUrl.replace(/#/g, "%23");
// console.debug(`building url: [${pathFragOrig}] => [${theUrl}]`)
return theUrl;
};
}
getJson = async (pathFragOrig: string) => {
const theUrl = this.buildUrl(pathFragOrig);
async _getJson(pathFragOrig: string) {
const theUrl = this._buildUrl(pathFragOrig);
console.debug(`getJson, theUrl=${theUrl}`);
return JSON.parse(
await request({
@ -517,10 +508,10 @@ export class WrappedOnedriveClient {
},
})
);
};
}
postJson = async (pathFragOrig: string, payload: any) => {
const theUrl = this.buildUrl(pathFragOrig);
async _postJson(pathFragOrig: string, payload: any) {
const theUrl = this._buildUrl(pathFragOrig);
console.debug(`postJson, theUrl=${theUrl}`);
return JSON.parse(
await request({
@ -533,10 +524,10 @@ export class WrappedOnedriveClient {
},
})
);
};
}
patchJson = async (pathFragOrig: string, payload: any) => {
const theUrl = this.buildUrl(pathFragOrig);
async _patchJson(pathFragOrig: string, payload: any) {
const theUrl = this._buildUrl(pathFragOrig);
console.debug(`patchJson, theUrl=${theUrl}`);
return JSON.parse(
await request({
@ -549,10 +540,10 @@ export class WrappedOnedriveClient {
},
})
);
};
}
deleteJson = async (pathFragOrig: string) => {
const theUrl = this.buildUrl(pathFragOrig);
async _deleteJson(pathFragOrig: string) {
const theUrl = this._buildUrl(pathFragOrig);
console.debug(`deleteJson, theUrl=${theUrl}`);
if (VALID_REQURL) {
await requestUrl({
@ -570,14 +561,15 @@ export class WrappedOnedriveClient {
},
});
}
};
}
putArrayBuffer = async (pathFragOrig: string, payload: ArrayBuffer) => {
const theUrl = this.buildUrl(pathFragOrig);
async _putArrayBuffer(pathFragOrig: string, payload: ArrayBuffer) {
const theUrl = this._buildUrl(pathFragOrig);
console.debug(`putArrayBuffer, theUrl=${theUrl}`);
// TODO:
// 20220401: On Android, requestUrl has an issue where text becomes base64.
// Use fetch everywhere instead!
// biome-ignore lint/correctness/noConstantCondition: hard code
if (false /*VALID_REQURL*/) {
const res = await requestUrl({
url: theUrl,
@ -601,7 +593,7 @@ export class WrappedOnedriveClient {
});
return (await res.json()) as DriveItem | UploadSession;
}
};
}
/**
* A specialized function to upload large files by parts
@ -611,14 +603,14 @@ export class WrappedOnedriveClient {
* @param rangeEnd the end, exclusive
* @param size
*/
putUint8ArrayByRange = async (
async _putUint8ArrayByRange(
pathFragOrig: string,
payload: Uint8Array,
rangeStart: number,
rangeEnd: number,
size: number
) => {
const theUrl = this.buildUrl(pathFragOrig);
) {
const theUrl = this._buildUrl(pathFragOrig);
console.debug(
`putUint8ArrayByRange, theUrl=${theUrl}, range=${rangeStart}-${
rangeEnd - 1
@ -628,6 +620,7 @@ export class WrappedOnedriveClient {
// TODO:
// 20220401: On Android, requestUrl has an issue where text becomes base64.
// Use fetch everywhere instead!
// biome-ignore lint/correctness/noConstantCondition: hard code
if (false /*VALID_REQURL*/) {
const res = await requestUrl({
url: theUrl,
@ -654,202 +647,181 @@ export class WrappedOnedriveClient {
});
return (await res.json()) as DriveItem | UploadSession;
}
};
}
export const getOnedriveClient = (
onedriveConfig: OnedriveConfig,
remoteBaseDir: string,
saveUpdatedConfigFunc: () => Promise<any>
) => {
return new WrappedOnedriveClient(
onedriveConfig,
remoteBaseDir,
saveUpdatedConfigFunc
);
};
/**
* Use delta api to list all files and folders
* https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_delta?view=odsp-graph-online
* @param client
*/
export const listAllFromRemote = async (client: WrappedOnedriveClient) => {
await client.init();
const NEXT_LINK_KEY = "@odata.nextLink";
const DELTA_LINK_KEY = "@odata.deltaLink";
let res = await client.getJson(
`/drive/special/approot:/${client.remoteBaseDir}:/delta`
);
let driveItems = res.value as DriveItem[];
// console.debug(driveItems);
while (NEXT_LINK_KEY in res) {
res = await client.getJson(res[NEXT_LINK_KEY]);
driveItems.push(...cloneDeep(res.value as DriveItem[]));
}
// lastly we should have delta link?
if (DELTA_LINK_KEY in res) {
client.onedriveConfig.deltaLink = res[DELTA_LINK_KEY];
await client.saveUpdatedConfigFunc();
}
/**
* Use delta api to list all files and folders
* https://docs.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_delta?view=odsp-graph-online
*/
async walk(): Promise<Entity[]> {
await this._init();
// unify everything to Entity
const unifiedContents = driveItems
.map((x) => fromDriveItemToEntity(x, client.remoteBaseDir))
.filter((x) => x.keyRaw !== "/");
const NEXT_LINK_KEY = "@odata.nextLink";
const DELTA_LINK_KEY = "@odata.deltaLink";
return unifiedContents;
};
let res = await this._getJson(
`/drive/special/approot:/${this.remoteBaseDir}:/delta`
);
const driveItems = res.value as DriveItem[];
// console.debug(driveItems);
export const getRemoteMeta = async (
client: WrappedOnedriveClient,
remotePath: string
) => {
await client.init();
// console.info(`remotePath=${remotePath}`);
const rsp = await client.getJson(
`${remotePath}?$select=cTag,eTag,fileSystemInfo,folder,file,name,parentReference,size`
);
// console.info(rsp);
const driveItem = rsp as DriveItem;
const res = fromDriveItemToEntity(driveItem, client.remoteBaseDir);
// console.info(res);
return res;
};
export const uploadToRemote = async (
client: WrappedOnedriveClient,
fileOrFolderPath: string,
vault: Vault | undefined,
isRecursively: boolean,
cipher: Cipher,
remoteEncryptedKey: string = "",
foldersCreatedBefore: Set<string> | undefined = undefined,
uploadRaw: boolean = false,
rawContent: string | ArrayBuffer = ""
): Promise<UploadedType> => {
await client.init();
let uploadFile = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
if (remoteEncryptedKey === undefined || remoteEncryptedKey === "") {
throw Error(
`uploadToRemote(onedrive) you have password but remoteEncryptedKey is empty!`
);
while (NEXT_LINK_KEY in res) {
res = await this._getJson(res[NEXT_LINK_KEY]);
driveItems.push(...cloneDeep(res.value as DriveItem[]));
}
uploadFile = remoteEncryptedKey;
}
uploadFile = getOnedrivePath(uploadFile, client.remoteBaseDir);
console.debug(`uploadFile=${uploadFile}`);
let mtime = 0;
let ctime = 0;
const s = await vault?.adapter?.stat(fileOrFolderPath);
if (s !== undefined && s !== null) {
mtime = s.mtime;
ctime = s.ctime;
}
const ctimeStr = new Date(ctime).toISOString();
const mtimeStr = new Date(mtime).toISOString();
const isFolder = fileOrFolderPath.endsWith("/");
if (isFolder && isRecursively) {
throw Error("upload function doesn't implement recursive function yet!");
} else if (isFolder && !isRecursively) {
if (uploadRaw) {
throw Error(`you specify uploadRaw, but you also provide a folder key!`);
// lastly we should have delta link?
if (DELTA_LINK_KEY in res) {
this.onedriveConfig.deltaLink = res[DELTA_LINK_KEY];
await this.saveUpdatedConfigFunc();
}
// folder
if (cipher.isPasswordEmpty() || cipher.isFolderAware()) {
// if not encrypted, || encrypted isFolderAware, mkdir a remote folder
if (foldersCreatedBefore?.has(uploadFile)) {
// created, pass
} else {
// https://stackoverflow.com/questions/56479865/creating-nested-folders-in-one-go-onedrive-api
// use PATCH to create folder recursively!!!
let k: any = {
folder: {},
"@microsoft.graph.conflictBehavior": "replace",
};
if (mtime !== 0 && ctime !== 0) {
k = {
folder: {},
"@microsoft.graph.conflictBehavior": "replace",
fileSystemInfo: {
lastModifiedDateTime: mtimeStr,
createdDateTime: ctimeStr,
} as FileSystemInfo,
};
}
await client.patchJson(uploadFile, k);
}
const res = await getRemoteMeta(client, uploadFile);
return {
entity: res,
mtimeCli: mtime,
};
// unify everything to Entity
const unifiedContents = driveItems
.map((x) => fromDriveItemToEntity(x, this.remoteBaseDir))
.filter((x) => x.key !== "/");
return unifiedContents;
}
async walkPartial(): Promise<Entity[]> {
await this._init();
const DELTA_LINK_KEY = "@odata.deltaLink";
const res = await this._getJson(
`/drive/special/approot:/${this.remoteBaseDir}:/delta`
);
const driveItems = res.value as DriveItem[];
// console.debug(driveItems);
// lastly we should have delta link?
if (DELTA_LINK_KEY in res) {
this.onedriveConfig.deltaLink = res[DELTA_LINK_KEY];
await this.saveUpdatedConfigFunc();
}
// unify everything to Entity
const unifiedContents = driveItems
.map((x) => fromDriveItemToEntity(x, this.remoteBaseDir))
.filter((x) => x.key !== "/");
return unifiedContents;
}
async stat(key: string): Promise<Entity> {
await this._init();
return await this._statFromRoot(getOnedrivePath(key, this.remoteBaseDir));
}
async _statFromRoot(key: string): Promise<Entity> {
// console.info(`remotePath=${remotePath}`);
const rsp = await this._getJson(
`${key}?$select=cTag,eTag,fileSystemInfo,folder,file,name,parentReference,size`
);
// console.info(rsp);
const driveItem = rsp as DriveItem;
const res = fromDriveItemToEntity(driveItem, this.remoteBaseDir);
// console.info(res);
return res;
}
async mkdir(key: string, mtime?: number, ctime?: number): Promise<Entity> {
if (!key.endsWith("/")) {
throw Error(`you should not call mkdir on ${key}`);
}
await this._init();
const uploadFolder = getOnedrivePath(key, this.remoteBaseDir);
console.debug(`mkdir uploadFolder=${uploadFolder}`);
return await this._mkdirFromRoot(uploadFolder, mtime, ctime);
}
async _mkdirFromRoot(
key: string,
mtime?: number,
ctime?: number
): Promise<Entity> {
// console.debug(`foldersCreatedBefore=${Array.from(this.foldersCreatedBefore)}`);
if (this.foldersCreatedBefore.has(key)) {
// created, pass
// console.debug(`folder ${key} created.`)
} else {
// if encrypted && !isFolderAware(),
// upload a fake, random-size file
// with the encrypted file name
const byteLengthRandom = getRandomIntInclusive(
1,
65536 /* max allowed */
);
const arrBufRandom = await cipher.encryptContent(
getRandomArrayBuffer(byteLengthRandom)
);
// an encrypted folder is always small, we just use put here
await client.putArrayBuffer(
`${uploadFile}:/content?${new URLSearchParams({
"@microsoft.graph.conflictBehavior": "replace",
})}`,
arrBufRandom
);
if (mtime !== 0 && ctime !== 0) {
await client.patchJson(`${uploadFile}`, {
fileSystemInfo: {
lastModifiedDateTime: mtimeStr,
createdDateTime: ctimeStr,
} as FileSystemInfo,
});
}
// console.info(uploadResult)
const res = await getRemoteMeta(client, uploadFile);
return {
entity: res,
mtimeCli: mtime,
// https://stackoverflow.com/questions/56479865/creating-nested-folders-in-one-go-onedrive-api
// use PATCH to create folder recursively!!!
const payload: any = {
folder: {},
"@microsoft.graph.conflictBehavior": "replace",
};
}
} else {
// file
// we ignore isRecursively parameter here
let localContent = undefined;
if (uploadRaw) {
if (typeof rawContent === "string") {
localContent = new TextEncoder().encode(rawContent).buffer;
} else {
localContent = rawContent;
const fileSystemInfo: Record<string, string> = {};
if (mtime !== undefined && mtime !== 0) {
const mtimeStr = new Date(mtime).toISOString();
fileSystemInfo["lastModifiedDateTime"] = mtimeStr;
}
} else {
if (vault === undefined) {
throw new Error(
`the vault variable is not passed but we want to read ${fileOrFolderPath} for OneDrive`
if (ctime !== undefined && ctime !== 0) {
const ctimeStr = new Date(ctime).toISOString();
fileSystemInfo["createdDateTime"] = ctimeStr;
}
if (Object.keys(fileSystemInfo).length > 0) {
playload["fileSystemInfo"] = fileSystemInfo;
}
await this._patchJson(key, payload);
}
const res = await this._statFromRoot(key);
return res;
}
async writeFile(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity> {
if (key.endsWith("/")) {
throw Error(`you should not call writeFile on ${key}`);
}
await this._init();
const uploadFile = getOnedrivePath(key, this.remoteBaseDir);
console.debug(`uploadFile=${uploadFile}`);
return await this._writeFileFromRoot(
uploadFile,
content,
mtime,
ctime,
key,
this.onedriveConfig.emptyFile
);
}
async _writeFileFromRoot(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number,
origKey: string,
emptyFile: "skip" | "error"
): Promise<Entity> {
if (content.byteLength === 0) {
if (emptyFile === "error") {
throw Error(
`${origKey}: Empty file is not allowed in OneDrive; please write something in it.`
);
} else {
return {
key: origKey,
keyRaw: origKey,
mtimeSvr: mtime,
mtimeCli: mtime,
size: 0,
sizeRaw: 0,
synthesizedFile: true,
// hash: ?? // TODO
};
}
localContent = await vault.adapter.readBinary(fileOrFolderPath);
}
let remoteContent = localContent;
if (!cipher.isPasswordEmpty()) {
remoteContent = await cipher.encryptContent(localContent);
}
const ctimeStr = new Date(ctime).toISOString();
const mtimeStr = new Date(mtime).toISOString();
// no need to create parent folders firstly, cool!
// hard code range size
@ -857,16 +829,16 @@ export const uploadToRemote = async (
const RANGE_SIZE = MIN_UNIT * 20; // about 6.5536 MB
const DIRECT_UPLOAD_MAX_SIZE = 1000 * 1000 * 4; // 4 Megabyte
if (remoteContent.byteLength < DIRECT_UPLOAD_MAX_SIZE) {
if (content.byteLength < DIRECT_UPLOAD_MAX_SIZE) {
// directly using put!
await client.putArrayBuffer(
`${uploadFile}:/content?${new URLSearchParams({
await this._putArrayBuffer(
`${key}:/content?${new URLSearchParams({
"@microsoft.graph.conflictBehavior": "replace",
})}`,
remoteContent
content
);
if (mtime !== 0 && ctime !== 0) {
await client.patchJson(`${uploadFile}`, {
await this._patchJson(key, {
fileSystemInfo: {
lastModifiedDateTime: mtimeStr,
createdDateTime: ctimeStr,
@ -879,13 +851,13 @@ export const uploadToRemote = async (
// 1. create uploadSession
// uploadFile already starts with /drive/special/approot:/${remoteBaseDir}
let k: any = {
let payload: any = {
item: {
"@microsoft.graph.conflictBehavior": "replace",
},
};
if (mtime !== 0 && ctime !== 0) {
k = {
payload = {
item: {
"@microsoft.graph.conflictBehavior": "replace",
@ -897,9 +869,9 @@ export const uploadToRemote = async (
},
};
}
const s: UploadSession = await client.postJson(
`${uploadFile}:/createUploadSession`,
k
const s: UploadSession = await this._postJson(
`${key}:/createUploadSession`,
payload
);
const uploadUrl = s.uploadUrl!;
console.debug("uploadSession = ");
@ -907,12 +879,12 @@ export const uploadToRemote = async (
// 2. upload by ranges
// convert to uint8
const uint8 = new Uint8Array(remoteContent);
const uint8 = new Uint8Array(content);
// upload the ranges one by one
let rangeStart = 0;
while (rangeStart < uint8.byteLength) {
await client.putUint8ArrayByRange(
await this._putUint8ArrayByRange(
uploadUrl,
uint8,
rangeStart,
@ -923,132 +895,95 @@ export const uploadToRemote = async (
}
}
const res = await getRemoteMeta(client, uploadFile);
return {
entity: res,
mtimeCli: mtime,
};
}
};
const downloadFromRemoteRaw = async (
client: WrappedOnedriveClient,
remotePath: string
): Promise<ArrayBuffer> => {
await client.init();
const rsp = await client.getJson(
`${remotePath}?$select=@microsoft.graph.downloadUrl`
);
const downloadUrl: string = rsp["@microsoft.graph.downloadUrl"];
if (VALID_REQURL) {
const content = (
await requestUrl({
url: downloadUrl,
headers: { "Cache-Control": "no-cache" },
})
).arrayBuffer;
return content;
} else {
const content = await // cannot set no-cache here, will have cors error
(await fetch(downloadUrl)).arrayBuffer();
return content;
}
};
export const downloadFromRemote = async (
client: WrappedOnedriveClient,
fileOrFolderPath: string,
vault: Vault,
mtime: number,
cipher: Cipher,
remoteEncryptedKey: string = "",
skipSaving: boolean = false
) => {
await client.init();
const isFolder = fileOrFolderPath.endsWith("/");
if (!skipSaving) {
await mkdirpInVault(fileOrFolderPath, vault);
const res = await this._statFromRoot(key);
return res;
}
if (isFolder) {
// mkdirp locally is enough
// do nothing here
return new ArrayBuffer(0);
} else {
let downloadFile = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
downloadFile = remoteEncryptedKey;
async readFile(key: string): Promise<ArrayBuffer> {
await this._init();
if (key.endsWith("/")) {
throw new Error(`you should not call readFile on folder ${key}`);
}
downloadFile = getOnedrivePath(downloadFile, client.remoteBaseDir);
const remoteContent = await downloadFromRemoteRaw(client, downloadFile);
let localContent = remoteContent;
if (!cipher.isPasswordEmpty()) {
localContent = await cipher.decryptContent(remoteContent);
}
if (!skipSaving) {
await vault.adapter.writeBinary(fileOrFolderPath, localContent, {
mtime: mtime,
});
}
return localContent;
const downloadFile = getOnedrivePath(key, this.remoteBaseDir);
return await this._readFileFromRoot(downloadFile);
}
};
export const deleteFromRemote = async (
client: WrappedOnedriveClient,
fileOrFolderPath: string,
cipher: Cipher,
remoteEncryptedKey: string = ""
) => {
if (fileOrFolderPath === "/") {
return;
}
let remoteFileName = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
remoteFileName = remoteEncryptedKey;
}
remoteFileName = getOnedrivePath(remoteFileName, client.remoteBaseDir);
await client.init();
await client.deleteJson(remoteFileName);
};
export const checkConnectivity = async (
client: WrappedOnedriveClient,
callbackFunc?: any
) => {
try {
const k = await getUserDisplayName(client);
return k !== "<unknown display name>";
} catch (err) {
console.debug(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
async _readFileFromRoot(key: string): Promise<ArrayBuffer> {
const rsp = await this._getJson(
`${key}?$select=@microsoft.graph.downloadUrl`
);
const downloadUrl: string = rsp["@microsoft.graph.downloadUrl"];
if (VALID_REQURL) {
const content = (
await requestUrl({
url: downloadUrl,
headers: { "Cache-Control": "no-cache" },
})
).arrayBuffer;
return content;
} else {
// cannot set no-cache here, will have cors error
const content = await (await fetch(downloadUrl)).arrayBuffer();
return content;
}
}
async rename(key1: string, key2: string): Promise<void> {
if (key1 === "" || key1 === "/" || key2 === "" || key2 === "/") {
return;
}
const remoteFileName1 = getOnedrivePath(key1, this.remoteBaseDir);
const remoteFileName2 = getOnedrivePath(key2, this.remoteBaseDir);
await this._init();
await this._patchJson(remoteFileName1, {
name: remoteFileName2,
});
}
async rm(key: string): Promise<void> {
if (key === "" || key === "/") {
return;
}
const remoteFileName = getOnedrivePath(key, this.remoteBaseDir);
await this._init();
await this._deleteJson(remoteFileName);
}
async checkConnect(callbackFunc?: any): Promise<boolean> {
try {
const k = await this.getUserDisplayName();
return k !== "<unknown display name>";
} catch (err) {
console.debug(err);
callbackFunc?.(err);
return false;
}
}
async getUserDisplayName() {
await this._init();
const res: User = await this._getJson("/me?$select=displayName");
return res.displayName || "<unknown display name>";
}
/**
*
* https://docs.microsoft.com/en-us/azure/active-directory/develop/v2-protocols-oidc#send-a-sign-out-request
* https://docs.microsoft.com/en-us/graph/api/user-revokesigninsessions
* https://docs.microsoft.com/en-us/graph/api/user-invalidateallrefreshtokens
*/
async revokeAuth() {
// await this._init();
// await this._postJson("/me/revokeSignInSessions", {});
throw new Error("Method not implemented.");
}
async getRevokeAddr() {
return "https://account.live.com/consent/Manage";
}
allowEmptyFile(): boolean {
return false;
}
};
export const getUserDisplayName = async (client: WrappedOnedriveClient) => {
await client.init();
const res: User = await client.getJson("/me?$select=displayName");
return res.displayName || "<unknown display name>";
};
/**
*
* https://docs.microsoft.com/en-us/azure/active-directory/develop/v2-protocols-oidc#send-a-sign-out-request
* https://docs.microsoft.com/en-us/graph/api/user-revokesigninsessions
* https://docs.microsoft.com/en-us/graph/api/user-invalidateallrefreshtokens
* @param client
*/
// export const revokeAuth = async (client: WrappedOnedriveClient) => {
// await client.init();
// await client.postJson('/me/revokeSignInSessions', {});
// };
export const getRevokeAddr = async () => {
return "https://account.live.com/consent/Manage";
};
}
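The upload-session loop above follows the standard Graph requirement that every chunk except the last be a multiple of 320 KiB; RANGE_SIZE = MIN_UNIT * 20 together with the "about 6.5536 MB" comment implies MIN_UNIT = 327680 bytes. A standalone sketch of the range arithmetic:

const MIN_UNIT = 327680; // 320 KiB, the chunk granularity Graph requires
const RANGE_SIZE = MIN_UNIT * 20; // 6,553,600 bytes, i.e. about 6.5536 MB
function* chunkRanges(totalSize: number) {
  for (let start = 0; start < totalSize; start += RANGE_SIZE) {
    const end = Math.min(start + RANGE_SIZE, totalSize); // exclusive
    // each PUT to the uploadUrl carries: Content-Range: bytes start-(end-1)/totalSize
    yield { start, end, contentRange: `bytes ${start}-${end - 1}/${totalSize}` };
  }
}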

838
src/fsS3.ts Normal file

@ -0,0 +1,838 @@
import { Buffer } from "buffer";
import * as path from "path";
import { Readable } from "stream";
import type { PutObjectCommandInput, _Object } from "@aws-sdk/client-s3";
import {
DeleteObjectCommand,
GetObjectCommand,
HeadObjectCommand,
type HeadObjectCommandOutput,
ListObjectsV2Command,
type ListObjectsV2CommandInput,
PutObjectCommand,
S3Client,
} from "@aws-sdk/client-s3";
import { Upload } from "@aws-sdk/lib-storage";
import type { HttpHandlerOptions } from "@aws-sdk/types";
import {
FetchHttpHandler,
type FetchHttpHandlerOptions,
} from "@smithy/fetch-http-handler";
// @ts-ignore
import { requestTimeout } from "@smithy/fetch-http-handler/dist-es/request-timeout";
import { type HttpRequest, HttpResponse } from "@smithy/protocol-http";
import { buildQueryString } from "@smithy/querystring-builder";
// biome-ignore lint/suspicious/noShadowRestrictedNames: <explanation>
import AggregateError from "aggregate-error";
import * as mime from "mime-types";
import { Platform, type RequestUrlParam, requestUrl } from "obsidian";
import PQueue from "p-queue";
import { DEFAULT_CONTENT_TYPE, type S3Config } from "./baseTypes";
import { VALID_REQURL } from "./baseTypesObs";
import { bufferToArrayBuffer, getFolderLevels } from "./misc";
import type { Entity } from "./baseTypes";
import { FakeFs } from "./fsAll";
////////////////////////////////////////////////////////////////////////////////
// special handler using Obsidian requestUrl
////////////////////////////////////////////////////////////////////////////////
/**
* This is close to the original implementation of FetchHttpHandler
* https://github.com/aws/aws-sdk-js-v3/blob/main/packages/fetch-http-handler/src/fetch-http-handler.ts
* which is released under the Apache 2 License,
* but this version uses Obsidian requestUrl instead.
*/
class ObsHttpHandler extends FetchHttpHandler {
requestTimeoutInMs: number | undefined;
reverseProxyNoSignUrl: string | undefined;
constructor(
options?: FetchHttpHandlerOptions,
reverseProxyNoSignUrl?: string
) {
super(options);
this.requestTimeoutInMs =
options === undefined ? undefined : options.requestTimeout;
this.reverseProxyNoSignUrl = reverseProxyNoSignUrl;
}
async handle(
request: HttpRequest,
{ abortSignal }: HttpHandlerOptions = {}
): Promise<{ response: HttpResponse }> {
if (abortSignal?.aborted) {
const abortError = new Error("Request aborted");
abortError.name = "AbortError";
return Promise.reject(abortError);
}
let path = request.path;
if (request.query) {
const queryString = buildQueryString(request.query);
if (queryString) {
path += `?${queryString}`;
}
}
const { port, method } = request;
let url = `${request.protocol}//${request.hostname}${
port ? `:${port}` : ""
}${path}`;
if (
this.reverseProxyNoSignUrl !== undefined &&
this.reverseProxyNoSignUrl !== ""
) {
const urlObj = new URL(url);
urlObj.host = this.reverseProxyNoSignUrl;
url = urlObj.href;
}
const body =
method === "GET" || method === "HEAD" ? undefined : request.body;
const transformedHeaders: Record<string, string> = {};
for (const key of Object.keys(request.headers)) {
const keyLower = key.toLowerCase();
if (keyLower === "host" || keyLower === "content-length") {
continue;
}
transformedHeaders[keyLower] = request.headers[key];
}
let contentType: string | undefined = undefined;
if (transformedHeaders["content-type"] !== undefined) {
contentType = transformedHeaders["content-type"];
}
let transformedBody: any = body;
if (ArrayBuffer.isView(body)) {
transformedBody = bufferToArrayBuffer(body);
}
const param: RequestUrlParam = {
body: transformedBody,
headers: transformedHeaders,
method: method,
url: url,
contentType: contentType,
};
const raceOfPromises = [
requestUrl(param).then((rsp) => {
const headers = rsp.headers;
const headersLower: Record<string, string> = {};
for (const key of Object.keys(headers)) {
headersLower[key.toLowerCase()] = headers[key];
}
const stream = new ReadableStream<Uint8Array>({
start(controller) {
controller.enqueue(new Uint8Array(rsp.arrayBuffer));
controller.close();
},
});
return {
response: new HttpResponse({
headers: headersLower,
statusCode: rsp.status,
body: stream,
}),
};
}),
requestTimeout(this.requestTimeoutInMs),
];
if (abortSignal) {
raceOfPromises.push(
new Promise<never>((resolve, reject) => {
abortSignal.onabort = () => {
const abortError = new Error("Request aborted");
abortError.name = "AbortError";
reject(abortError);
};
})
);
}
return Promise.race(raceOfPromises);
}
}
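// Usage sketch (illustrative; mirrors how getS3Client below wires it up):
// pass the handler as `requestHandler` so that every S3 http call goes
// through Obsidian's requestUrl and bypasses CORS on capable platforms:
//   new S3Client({ region, credentials, requestHandler: new ObsHttpHandler(undefined, "") })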
////////////////////////////////////////////////////////////////////////////////
// other stuffs
////////////////////////////////////////////////////////////////////////////////
export const simpleTransRemotePrefix = (x: string) => {
if (x === undefined) {
return "";
}
let y = path.posix.normalize(x.trim());
if (y === undefined || y === "" || y === "/" || y === ".") {
return "";
}
if (y.startsWith("/")) {
y = y.slice(1);
}
if (!y.endsWith("/")) {
y = `${y}/`;
}
return y;
};
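// Examples of the normalization above (illustrative):
//   simpleTransRemotePrefix("  /sub/dir ") === "sub/dir/"
//   simpleTransRemotePrefix(".") === ""
//   simpleTransRemotePrefix("a//b") === "a/b/"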
export const DEFAULT_S3_CONFIG: S3Config = {
s3Endpoint: "",
s3Region: "",
s3AccessKeyID: "",
s3SecretAccessKey: "",
s3BucketName: "",
bypassCorsLocally: true,
partsConcurrency: 20,
forcePathStyle: false,
remotePrefix: "",
useAccurateMTime: false, // it costs extra api requests (and money), disabled by default
reverseProxyNoSignUrl: "",
generateFolderObject: false, // newer versions do not generate folder objects by default
};
/**
* The Body of the response of AWS GetObject has mixed types,
* and we want to get an ArrayBuffer here.
* See https://github.com/aws/aws-sdk-js-v3/issues/1877
* @param b The Body of GetObject
* @returns Promise<ArrayBuffer>
*/
const getObjectBodyToArrayBuffer = async (
b: Readable | ReadableStream | Blob | undefined
) => {
if (b === undefined) {
throw Error(`ObjectBody is undefined and we don't know how to deal with it`);
}
if (b instanceof Readable) {
return (await new Promise((resolve, reject) => {
const chunks: Uint8Array[] = [];
b.on("data", (chunk) => chunks.push(chunk));
b.on("error", reject);
b.on("end", () => resolve(bufferToArrayBuffer(Buffer.concat(chunks))));
})) as ArrayBuffer;
} else if (b instanceof ReadableStream) {
return await new Response(b, {}).arrayBuffer();
} else if (b instanceof Blob) {
return await b.arrayBuffer();
} else {
throw TypeError(`The type of ${b} is not one of the supported types`);
}
};
const getS3Client = (s3Config: S3Config) => {
let endpoint = s3Config.s3Endpoint;
if (!(endpoint.startsWith("http://") || endpoint.startsWith("https://"))) {
endpoint = `https://${endpoint}`;
}
let s3Client: S3Client;
if (VALID_REQURL && s3Config.bypassCorsLocally) {
s3Client = new S3Client({
region: s3Config.s3Region,
endpoint: endpoint,
forcePathStyle: s3Config.forcePathStyle,
credentials: {
accessKeyId: s3Config.s3AccessKeyID,
secretAccessKey: s3Config.s3SecretAccessKey,
},
requestHandler: new ObsHttpHandler(
undefined,
s3Config.reverseProxyNoSignUrl
),
});
} else {
s3Client = new S3Client({
region: s3Config.s3Region,
endpoint: endpoint,
forcePathStyle: s3Config.forcePathStyle,
credentials: {
accessKeyId: s3Config.s3AccessKeyID,
secretAccessKey: s3Config.s3SecretAccessKey,
},
});
}
s3Client.middlewareStack.add(
(next, context) => (args) => {
(args.request as any).headers["cache-control"] = "no-cache";
return next(args);
},
{
step: "build",
}
);
return s3Client;
};
const getLocalNoPrefixPath = (
fileOrFolderPathWithRemotePrefix: string,
remotePrefix: string
) => {
if (
!(
fileOrFolderPathWithRemotePrefix === `${remotePrefix}` ||
fileOrFolderPathWithRemotePrefix.startsWith(`${remotePrefix}`)
)
) {
throw Error(
`"${fileOrFolderPathWithRemotePrefix}" doesn't starts with "${remotePrefix}"`
);
}
return fileOrFolderPathWithRemotePrefix.slice(`${remotePrefix}`.length);
};
const getRemoteWithPrefixPath = (
fileOrFolderPath: string,
remotePrefix: string
) => {
if (remotePrefix === undefined || remotePrefix === "") {
return fileOrFolderPath;
}
let key = fileOrFolderPath;
if (fileOrFolderPath === "/" || fileOrFolderPath === "") {
// special
key = remotePrefix;
}
if (!fileOrFolderPath.startsWith("/")) {
key = `${remotePrefix}${fileOrFolderPath}`;
}
return key;
};
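// Examples (illustrative), with remotePrefix = "sub/dir/":
//   getRemoteWithPrefixPath("", "sub/dir/") === "sub/dir/"
//   getRemoteWithPrefixPath("note.md", "sub/dir/") === "sub/dir/note.md"
// and an empty remotePrefix returns the input path unchanged.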
const fromS3ObjectToEntity = (
x: _Object,
remotePrefix: string,
mtimeRecords: Record<string, number>,
ctimeRecords: Record<string, number>
) => {
// console.debug(`fromS3ObjectToEntity: ${x.Key!}, ${JSON.stringify(x,null,2)}`);
// S3 officially only supports seconds precision!!!!!
const mtimeSvr = Math.floor(x.LastModified!.valueOf() / 1000.0) * 1000;
let mtimeCli = mtimeSvr;
if (x.Key! in mtimeRecords) {
const m2 = mtimeRecords[x.Key!];
if (m2 !== 0) {
// to be compatible with RClone, newer versions read and store the time in seconds!
if (m2 >= 1000000000000) {
// it's a milliseconds value, uploaded by old versions of the plugin
mtimeCli = m2;
} else {
// it's a seconds value, uploaded by newer versions of the plugin (since March 24, 2024)
mtimeCli = m2 * 1000;
}
}
}
const key = getLocalNoPrefixPath(x.Key!, remotePrefix); // we remove prefix here
const r: Entity = {
key: key, // from S3's perspective, the keyRaw is the key; we will change it during decryption
keyRaw: key,
mtimeSvr: mtimeSvr,
mtimeCli: mtimeCli,
sizeRaw: x.Size!,
size: x.Size!, // from S3's perspective, the sizeRaw is the size; we will change it during decryption
etag: x.ETag,
synthesizedFolder: false,
};
return r;
};
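// Worked example of the seconds-vs-milliseconds heuristic above
// (hypothetical values): a stored mtime of 1716500000 is below 1e12, so it
// is treated as seconds and becomes 1716500000000 ms; a stored mtime of
// 1716500000000 is treated as already in milliseconds and kept as-is.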
const fromS3HeadObjectToEntity = (
fileOrFolderPathWithRemotePrefix: string,
x: HeadObjectCommandOutput,
remotePrefix: string,
useAccurateMTime: boolean
) => {
// console.debug(`fromS3HeadObjectToEntity: ${fileOrFolderPathWithRemotePrefix}: ${JSON.stringify(x,null,2)}`);
// S3 officially only supports seconds precision!!!!!
const mtimeSvr = Math.floor(x.LastModified!.valueOf() / 1000.0) * 1000;
let mtimeCli = mtimeSvr;
if (useAccurateMTime && x.Metadata !== undefined) {
const m2 = Math.floor(
Number.parseFloat(x.Metadata.mtime || x.Metadata.MTime || "0")
);
if (m2 !== 0) {
// to be compatible with RClone, newer versions read and store the time in seconds!
if (m2 >= 1000000000000) {
// it's a milliseconds value, uploaded by old versions of the plugin
mtimeCli = m2;
} else {
// it's a seconds value, uploaded by newer versions of the plugin (since March 24, 2024)
mtimeCli = m2 * 1000;
}
}
}
// console.debug(
// `fromS3HeadObjectToEntity, fileOrFolderPathWithRemotePrefix=${fileOrFolderPathWithRemotePrefix}, remotePrefix=${remotePrefix}, x=${JSON.stringify(
// x
// )} `
// );
const key = getLocalNoPrefixPath(
fileOrFolderPathWithRemotePrefix,
remotePrefix
);
// console.debug(`fromS3HeadObjectToEntity, key=${key} after removing prefix`);
return {
key: key,
keyRaw: key,
mtimeSvr: mtimeSvr,
mtimeCli: mtimeCli,
sizeRaw: x.ContentLength,
size: x.ContentLength,
etag: x.ETag,
synthesizedFolder: false,
} as Entity;
};
export class FakeFsS3 extends FakeFs {
s3Config: S3Config;
s3Client: S3Client;
kind: "s3";
synthFoldersCache: Record<string, Entity>;
constructor(s3Config: S3Config) {
super();
this.s3Config = s3Config;
this.s3Client = getS3Client(s3Config);
this.kind = "s3";
this.synthFoldersCache = {};
}
async walk(): Promise<Entity[]> {
const res = (
await this._walkFromRoot(this.s3Config.remotePrefix, false)
).filter((x) => x.key !== "" && x.key !== "/");
return res;
}
async walkPartial(): Promise<Entity[]> {
const res = (
await this._walkFromRoot(this.s3Config.remotePrefix, true)
).filter((x) => x.key !== "" && x.key !== "/");
return res;
}
/**
* the input key contains basedir (prefix),
* but the result doesn't contain it.
*/
async _walkFromRoot(prefixOfRawKeys: string | undefined, partial: boolean) {
const confCmd = {
Bucket: this.s3Config.s3BucketName,
} as ListObjectsV2CommandInput;
if (prefixOfRawKeys !== undefined && prefixOfRawKeys !== "") {
confCmd.Prefix = prefixOfRawKeys;
}
if (partial) {
confCmd.MaxKeys = 10; // no need to list more!
}
const contents = [] as _Object[];
const mtimeRecords: Record<string, number> = {};
const ctimeRecords: Record<string, number> = {};
const partsConcurrency = partial ? 1 : this.s3Config.partsConcurrency;
const queueHead = new PQueue({
concurrency: partsConcurrency,
autoStart: true,
});
queueHead.on("error", (error) => {
queueHead.pause();
queueHead.clear();
throw error;
});
let isTruncated = true;
do {
const rsp = await this.s3Client.send(new ListObjectsV2Command(confCmd));
if (rsp.$metadata.httpStatusCode !== 200) {
throw Error("some thing bad while listing remote!");
}
if (rsp.Contents === undefined) {
break;
}
contents.push(...rsp.Contents);
if (this.s3Config.useAccurateMTime) {
// HEAD requests for all objects: expensive, but needed for accurate mtime
for (const content of rsp.Contents) {
queueHead.add(async () => {
const rspHead = await this.s3Client.send(
new HeadObjectCommand({
Bucket: this.s3Config.s3BucketName,
Key: content.Key,
})
);
if (rspHead.$metadata.httpStatusCode !== 200) {
throw Error("some thing bad while heading single object!");
}
if (rspHead.Metadata === undefined) {
// pass
} else {
mtimeRecords[content.Key!] = Math.floor(
Number.parseFloat(
rspHead.Metadata.mtime || rspHead.Metadata.MTime || "0"
)
);
ctimeRecords[content.Key!] = Math.floor(
Number.parseFloat(
rspHead.Metadata.ctime || rspHead.Metadata.CTime || "0"
)
);
}
});
}
}
if (partial) {
// do not loop over
isTruncated = false;
} else {
// loop over
isTruncated = rsp.IsTruncated ?? false;
confCmd.ContinuationToken = rsp.NextContinuationToken;
if (
isTruncated &&
(confCmd.ContinuationToken === undefined ||
confCmd.ContinuationToken === "")
) {
throw Error("isTruncated is true but no continuationToken provided");
}
}
} while (isTruncated);
// wait for any head requests
await queueHead.onIdle();
// assemble the fake response
// in the end, we need to transform the response list
// back to the local contents-alike list
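// e.g. (assuming getFolderLevels("a/b/c.md", true) yields ["a/", "a/b/"]):
// a real object "a/b/c.md" leads us to synthesize folder entries "a/" and
// "a/b/", unless real objects with those folder keys also exist remotely.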
const res: Entity[] = [];
const realEntities = new Set<string>();
for (const remoteObj of contents) {
const remoteEntity = fromS3ObjectToEntity(
remoteObj,
this.s3Config.remotePrefix ?? "",
mtimeRecords,
ctimeRecords
);
realEntities.add(remoteEntity.key!);
res.push(remoteEntity);
for (const f of getFolderLevels(remoteEntity.key!, true)) {
if (realEntities.has(f)) {
delete this.synthFoldersCache[f];
continue;
}
if (
!this.synthFoldersCache.hasOwnProperty(f) ||
remoteEntity.mtimeSvr! >= this.synthFoldersCache[f].mtimeSvr!
) {
this.synthFoldersCache[f] = {
key: f,
keyRaw: f,
size: 0,
sizeRaw: 0,
sizeEnc: 0,
mtimeSvr: remoteEntity.mtimeSvr,
mtimeSvrFmt: remoteEntity.mtimeSvrFmt,
mtimeCli: remoteEntity.mtimeCli,
mtimeCliFmt: remoteEntity.mtimeCliFmt,
synthesizedFolder: true,
};
}
}
}
for (const key of Object.keys(this.synthFoldersCache)) {
res.push(this.synthFoldersCache[key]);
}
return res;
}
async stat(key: string): Promise<Entity> {
if (this.synthFoldersCache.hasOwnProperty(key)) {
return this.synthFoldersCache[key];
}
let keyFullPath = key;
keyFullPath = getRemoteWithPrefixPath(
keyFullPath,
this.s3Config.remotePrefix ?? ""
);
return await this._statFromRoot(keyFullPath);
}
/**
* the input key contains basedir (prefix),
* but the result doesn't contain it.
*/
async _statFromRoot(key: string): Promise<Entity> {
if (
this.s3Config.remotePrefix !== undefined &&
this.s3Config.remotePrefix !== "" &&
!key.startsWith(this.s3Config.remotePrefix)
) {
throw Error(`_statFromRoot should only accept prefix-ed path`);
}
const res = await this.s3Client.send(
new HeadObjectCommand({
Bucket: this.s3Config.s3BucketName,
Key: key,
})
);
return fromS3HeadObjectToEntity(
key,
res,
this.s3Config.remotePrefix ?? "",
this.s3Config.useAccurateMTime ?? false
);
}
async mkdir(key: string, mtime?: number, ctime?: number): Promise<Entity> {
if (!key.endsWith("/")) {
throw new Error(`You should not call mkdir on ${key}!`);
}
const generateFolderObject = this.s3Config.generateFolderObject ?? false;
if (!generateFolderObject) {
const synth = {
key: key,
keyRaw: key,
size: 0,
sizeRaw: 0,
sizeEnc: 0,
mtimeSvr: mtime,
mtimeCli: mtime,
synthesizedFolder: true,
};
this.synthFoldersCache[key] = synth;
return synth;
}
const uploadFile = getRemoteWithPrefixPath(
key,
this.s3Config.remotePrefix ?? ""
);
return await this._mkdirFromRoot(uploadFile, mtime, ctime);
}
async _mkdirFromRoot(key: string, mtime?: number, ctime?: number) {
if (
this.s3Config.remotePrefix !== undefined &&
this.s3Config.remotePrefix !== "" &&
!key.startsWith(this.s3Config.remotePrefix)
) {
throw Error(`_mkdirFromRoot should only accept prefix-ed path`);
}
const contentType = DEFAULT_CONTENT_TYPE;
const p: PutObjectCommandInput = {
Bucket: this.s3Config.s3BucketName,
Key: key,
Body: "",
ContentType: contentType,
ContentLength: 0, // interestingly, we need to set this explicitly to avoid a warning
};
const metadata: Record<string, string> = {};
if (mtime !== undefined && mtime !== 0) {
metadata["MTime"] = `${mtime / 1000.0}`;
}
if (ctime !== undefined && ctime !== 0) {
metadata["CTime"] = `${ctime / 1000.0}`;
}
if (Object.keys(metadata).length > 0) {
p["Metadata"] = metadata;
}
await this.s3Client.send(new PutObjectCommand(p));
return await this._statFromRoot(key);
}
async writeFile(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity> {
const uploadFile = getRemoteWithPrefixPath(
key,
this.s3Config.remotePrefix ?? ""
);
const res = await this._writeFileFromRoot(
uploadFile,
content,
mtime,
ctime
);
return res;
}
/**
* the input key contains basedir (prefix),
* but the result doesn't contain it.
*/
async _writeFileFromRoot(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity> {
if (
this.s3Config.remotePrefix !== undefined &&
this.s3Config.remotePrefix !== "" &&
!key.startsWith(this.s3Config.remotePrefix)
) {
throw Error(`_writeFileFromRoot should only accept prefix-ed path`);
}
const bytesIn5MB = 5242880;
const body = new Uint8Array(content);
let contentType = DEFAULT_CONTENT_TYPE;
contentType =
mime.contentType(mime.lookup(key) || DEFAULT_CONTENT_TYPE) ||
DEFAULT_CONTENT_TYPE;
const upload = new Upload({
client: this.s3Client,
queueSize: this.s3Config.partsConcurrency, // concurrency
partSize: bytesIn5MB, // minimal 5MB by default
leavePartsOnError: false,
params: {
Bucket: this.s3Config.s3BucketName,
Key: key,
Body: body,
ContentType: contentType,
Metadata: {
MTime: `${mtime / 1000.0}`,
CTime: `${ctime / 1000.0}`,
},
},
});
upload.on("httpUploadProgress", (progress) => {
// console.info(progress);
});
await upload.done();
return await this._statFromRoot(key);
}
async readFile(key: string): Promise<ArrayBuffer> {
if (key.endsWith("/")) {
throw new Error(`you should not call readFile on folder ${key}`);
}
const downloadFile = getRemoteWithPrefixPath(
key,
this.s3Config.remotePrefix ?? ""
);
return await this._readFileFromRoot(downloadFile);
}
async _readFileFromRoot(key: string): Promise<ArrayBuffer> {
if (
this.s3Config.remotePrefix !== undefined &&
this.s3Config.remotePrefix !== "" &&
!key.startsWith(this.s3Config.remotePrefix)
) {
throw Error(`_readFileFromRoot should only accept prefix-ed path`);
}
const data = await this.s3Client.send(
new GetObjectCommand({
Bucket: this.s3Config.s3BucketName,
Key: key,
})
);
const bodyContents = await getObjectBodyToArrayBuffer(data.Body);
return bodyContents;
}
async rename(key1: string, key2: string): Promise<void> {
throw Error(`rename not implemented for s3`);
}
async rm(key: string): Promise<void> {
if (key === "/") {
return;
}
if (this.synthFoldersCache.hasOwnProperty(key)) {
delete this.synthFoldersCache[key];
return;
}
const remoteFileName = getRemoteWithPrefixPath(
key,
this.s3Config.remotePrefix ?? ""
);
await this.s3Client.send(
new DeleteObjectCommand({
Bucket: this.s3Config.s3BucketName,
Key: remoteFileName,
})
);
// TODO: do we need to delete folder recursively?
// maybe we should not
// because the outer sync algorithm should do that
// (await this._walkFromRoot(remoteFileName)).map(...)
}
async checkConnect(callbackFunc?: any): Promise<boolean> {
try {
// const results = await this.s3Client.send(
// new HeadBucketCommand({ Bucket: this.s3Config.s3BucketName })
// );
// very simplified version of listing objects
const confCmd = {
Bucket: this.s3Config.s3BucketName,
} as ListObjectsV2CommandInput;
const results = await this.s3Client.send(
new ListObjectsV2Command(confCmd)
);
if (
results === undefined ||
results.$metadata === undefined ||
results.$metadata.httpStatusCode === undefined
) {
const err = "results or $metadata or httStatusCode is undefined";
console.debug(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
}
return false;
}
return results.$metadata.httpStatusCode === 200;
} catch (err: any) {
console.debug(err);
if (callbackFunc !== undefined) {
if (this.s3Config.s3Endpoint.contains(this.s3Config.s3BucketName)) {
const err2 = new AggregateError([
err,
new Error(
"Maybe you've included the bucket name inside the endpoint setting. Please remove the bucket name and try again."
),
]);
callbackFunc(err2);
} else {
callbackFunc(err);
}
}
return false;
}
}
async getUserDisplayName(): Promise<string> {
throw new Error("Method not implemented.");
}
async revokeAuth() {
throw new Error("Method not implemented.");
}
allowEmptyFile(): boolean {
return true;
}
}

850
src/fsWebdav.ts Normal file
View File

@ -0,0 +1,850 @@
import { Buffer } from "buffer";
import { Queue } from "@fyears/tsqueue";
import { getReasonPhrase } from "http-status-codes/build/cjs/utils-functions";
import chunk from "lodash/chunk";
import cloneDeep from "lodash/cloneDeep";
import flatten from "lodash/flatten";
import isString from "lodash/isString";
import { nanoid } from "nanoid";
import { Platform, type RequestUrlParam, requestUrl } from "obsidian";
import type {
FileStat,
RequestOptionsWithState,
WebDAVClient,
// Response,
// ResponseDataDetailed,
} from "webdav";
import type { Entity, WebdavConfig } from "./baseTypes";
import { VALID_REQURL } from "./baseTypesObs";
import { FakeFs } from "./fsAll";
import { bufferToArrayBuffer, delay, splitFileSizeToChunkRanges } from "./misc";
/**
* https://stackoverflow.com/questions/32850898/how-to-check-if-a-string-has-any-non-iso-8859-1-characters-with-javascript
* @param str
* @returns true if all characters are ISO-8859-1
*/
function onlyAscii(str: string) {
return !/[^\u0000-\u00ff]/g.test(str);
}
/**
* https://stackoverflow.com/questions/12539574/
* @param obj
* @returns
*/
function objKeyToLower(obj: Record<string, string>) {
return Object.fromEntries(
Object.entries(obj).map(([k, v]) => [k.toLowerCase(), v])
);
}
// @ts-ignore
import { getPatcher } from "webdav/dist/web/index.js";
if (VALID_REQURL) {
getPatcher().patch(
"request",
async (options: RequestOptionsWithState): Promise<Response> => {
const transformedHeaders = objKeyToLower({ ...options.headers });
delete transformedHeaders["host"];
delete transformedHeaders["content-length"];
const reqContentType =
transformedHeaders["accept"] ?? transformedHeaders["content-type"];
const retractedHeaders = { ...transformedHeaders };
if (retractedHeaders.hasOwnProperty("authorization")) {
retractedHeaders["authorization"] = "<retracted>";
}
// console.debug(`before request:`);
// console.debug(`url: ${options.url}`);
// console.debug(`method: ${options.method}`);
// console.debug(`headers: ${JSON.stringify(retractedHeaders, null, 2)}`);
// console.debug(`reqContentType: ${reqContentType}`);
const p: RequestUrlParam = {
url: options.url,
method: options.method,
body: options.data as string | ArrayBuffer,
headers: transformedHeaders,
contentType: reqContentType,
throw: false,
};
let r = await requestUrl(p);
if (
r.status === 401 &&
Platform.isIosApp &&
!options.url.endsWith("/") &&
!options.url.endsWith(".md") &&
options.method.toUpperCase() === "PROPFIND"
) {
// don't ask me why:
// some webdav servers have mysterious behaviours;
// if a folder is requested without a trailing slash, they return 401 instead of 404.
// here is a dirty hack that works
console.debug(`got 401, so we retry the request url with a trailing slash appended`);
p.url = `${options.url}/`;
r = await requestUrl(p);
}
// console.debug(`after request:`);
const rspHeaders = objKeyToLower({ ...r.headers });
// console.debug(`rspHeaders: ${JSON.stringify(rspHeaders, null, 2)}`);
for (const key in rspHeaders) {
if (rspHeaders.hasOwnProperty(key)) {
// avoid the error:
// Failed to read the 'headers' property from 'ResponseInit': String contains non ISO-8859-1 code point.
// const possibleNonAscii = [
// "Content-Disposition",
// "X-Accel-Redirect",
// "X-Outfilename",
// "X-Sendfile"
// ];
// for (const p of possibleNonAscii) {
// if (key === p || key === p.toLowerCase()) {
// rspHeaders[key] = encodeURIComponent(rspHeaders[key]);
// }
// }
if (!onlyAscii(rspHeaders[key])) {
// console.debug(`rspHeaders[key] needs encode: ${key}`);
rspHeaders[key] = encodeURIComponent(rspHeaders[key]);
}
}
}
let r2: Response | undefined = undefined;
const statusText = getReasonPhrase(r.status);
// console.debug(`statusText: ${statusText}`);
if ([101, 103, 204, 205, 304].includes(r.status)) {
// A null body status is a status that is 101, 103, 204, 205, or 304.
// https://fetch.spec.whatwg.org/#statuses
// fix this: Failed to construct 'Response': Response with null body status cannot have body
r2 = new Response(null, {
status: r.status,
statusText: statusText,
headers: rspHeaders,
});
} else {
r2 = new Response(r.arrayBuffer, {
status: r.status,
statusText: statusText,
headers: rspHeaders,
});
}
return r2;
}
);
}
// @ts-ignore
// biome-ignore lint: we want to ts-ignore the next line
import { AuthType, BufferLike, createClient } from "webdav/dist/web/index.js";
export const DEFAULT_WEBDAV_CONFIG = {
address: "",
username: "",
password: "",
authType: "basic",
manualRecursive: true,
depth: "manual_1",
remoteBaseDir: "",
} as WebdavConfig;
const getWebdavPath = (fileOrFolderPath: string, remoteBaseDir: string) => {
let key = fileOrFolderPath;
if (fileOrFolderPath === "/" || fileOrFolderPath === "") {
// special
key = `/${remoteBaseDir}/`;
} else if (fileOrFolderPath.startsWith("/")) {
console.warn(
`why does the path ${fileOrFolderPath} start with '/'? we just go on anyway.`
);
key = `/${remoteBaseDir}${fileOrFolderPath}`;
} else {
key = `/${remoteBaseDir}/${fileOrFolderPath}`;
}
return key;
};
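// Examples (illustrative), with remoteBaseDir = "myvault":
//   getWebdavPath("", "myvault") === "/myvault/"
//   getWebdavPath("note.md", "myvault") === "/myvault/note.md"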
const getNormPath = (fileOrFolderPath: string, remoteBaseDir: string) => {
if (
!(
fileOrFolderPath === `/${remoteBaseDir}` ||
fileOrFolderPath.startsWith(`/${remoteBaseDir}/`)
)
) {
throw Error(
`"${fileOrFolderPath}" doesn't starts with "/${remoteBaseDir}/"`
);
}
return fileOrFolderPath.slice(`/${remoteBaseDir}/`.length);
};
const fromWebdavItemToEntity = (x: FileStat, remoteBaseDir: string): Entity => {
let key = getNormPath(x.filename, remoteBaseDir);
if (x.type === "directory" && !key.endsWith("/")) {
key = `${key}/`;
}
const mtimeSvr = Date.parse(x.lastmod).valueOf();
return {
key: key,
keyRaw: key,
mtimeSvr: mtimeSvr,
mtimeCli: mtimeSvr, // TODO: no universal way to set mtime in webdav
size: x.size,
sizeRaw: x.size,
};
};
const tryEncodeURI = (x: string) => {
if (x.includes("%")) {
// likely encoded before!
return x;
}
return encodeURI(x);
};
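// e.g. tryEncodeURI("a b/c.md") === "a%20b/c.md", while the already-encoded
// "a%20b/c.md" is returned unchanged; the "%" check is only a heuristic and
// can be fooled by a raw "%" in the address.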
export class FakeFsWebdav extends FakeFs {
kind: "webdav";
webdavConfig: WebdavConfig;
remoteBaseDir: string;
client!: WebDAVClient;
vaultFolderExists: boolean;
saveUpdatedConfigFunc: () => Promise<any>;
supportNativePartial: boolean;
isNextcloud: boolean;
nextcloudUploadServerAddress: string;
constructor(
webdavConfig: WebdavConfig,
vaultName: string,
saveUpdatedConfigFunc: () => Promise<any>
) {
super();
this.kind = "webdav";
this.webdavConfig = cloneDeep(webdavConfig);
this.webdavConfig.address = tryEncodeURI(this.webdavConfig.address);
this.remoteBaseDir = this.webdavConfig.remoteBaseDir || vaultName || "";
this.vaultFolderExists = false;
this.saveUpdatedConfigFunc = saveUpdatedConfigFunc;
this.supportNativePartial = false;
this.isNextcloud = false;
this.nextcloudUploadServerAddress = "";
}
async _init() {
// init client if not inited
if (this.client !== undefined) {
return;
}
const headers = {
"Cache-Control": "no-cache",
};
if (
this.webdavConfig.username !== "" &&
this.webdavConfig.password !== ""
) {
this.client = createClient(this.webdavConfig.address, {
username: this.webdavConfig.username,
password: this.webdavConfig.password,
headers: headers,
authType:
this.webdavConfig.authType === "digest"
? AuthType.Digest
: AuthType.Password,
});
} else {
console.info("no password");
this.client = createClient(this.webdavConfig.address, {
headers: headers,
});
}
// check vault folder
if (this.vaultFolderExists) {
// pass
} else {
const res = await this.client.exists(`/${this.remoteBaseDir}/`);
if (res) {
// console.info("remote vault folder exits!");
this.vaultFolderExists = true;
} else {
console.info("remote vault folder not exists, creating");
await this.client.createDirectory(`/${this.remoteBaseDir}/`);
console.info("remote vault folder created!");
this.vaultFolderExists = true;
}
}
// adjust depth parameter
if (
this.webdavConfig.depth === "auto" ||
this.webdavConfig.depth === "auto_1" ||
this.webdavConfig.depth === "auto_infinity" ||
this.webdavConfig.depth === "auto_unknown"
) {
this.webdavConfig.depth = "manual_1";
this.webdavConfig.manualRecursive = true;
if (this.saveUpdatedConfigFunc !== undefined) {
await this.saveUpdatedConfigFunc();
console.info(
`webdav depth="auto_???" is changed to ${this.webdavConfig.depth}`
);
}
}
await this._checkPartialSupport();
}
/**
* <server>/remote.php/dav/files/<userid>
* => <server>/remote.php/dav/uploads/<userid>
*/
_getnextcloudUploadServerAddress = () => {
let k = this.webdavConfig.address;
if (k.endsWith("/")) {
// strip the trailing slash
k = k.substring(0, k.length - 1);
}
const s = k.split("/");
if (
s.length > 3 &&
s[s.length - 3] === "dav" &&
s[s.length - 2] === "files" &&
s[s.length - 1] !== ""
) {
s[s.length - 2] = "uploads";
return s.join("/");
}
throw Error(`cannot construct upload address for ${k}`);
};
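// e.g. (hypothetical server):
//   "https://cloud.example.com/remote.php/dav/files/alice"
//     -> "https://cloud.example.com/remote.php/dav/uploads/alice"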
async _checkPartialSupport() {
const compliance = await this.client.getDAVCompliance(
`/${this.remoteBaseDir}/`
);
for (const c of compliance.compliance) {
// nextcloud AND with an account
if (
c.toLocaleLowerCase().includes("nextcloud") &&
this.webdavConfig.username !== "" &&
this.webdavConfig.password !== ""
) {
// the address is parsable
const s = this.webdavConfig.address.split("/");
if (
s.length > 3 &&
s[s.length - 3] === "dav" &&
s[s.length - 2] === "files" &&
s[s.length - 1] !== ""
) {
this.isNextcloud = true;
this.nextcloudUploadServerAddress =
this._getnextcloudUploadServerAddress();
console.debug(
`isNextcloud=${this.isNextcloud}, uploadFolder=${this.nextcloudUploadServerAddress}`
);
return true;
} else {
return false;
}
}
}
// taken from https://github.com/perry-mitchell/webdav-client/blob/master/source/operations/partialUpdateFileContents.ts
// which is under MIT license
if (
(compliance.server.includes("Apache") &&
compliance.compliance.includes(
"<http://apache.org/dav/propset/fs/1>"
)) ||
compliance.compliance.includes("sabredav-partialupdate")
) {
this.supportNativePartial = true;
console.debug(`supportNativePartial=true`);
return true;
}
return false;
}
async walk(): Promise<Entity[]> {
await this._init();
let contents = [] as FileStat[];
if (
this.webdavConfig.depth === "auto" ||
this.webdavConfig.depth === "auto_unknown" ||
this.webdavConfig.depth === "auto_1" ||
this.webdavConfig.depth === "auto_infinity" /* don't trust auto now */ ||
this.webdavConfig.depth === "manual_1"
) {
// the remote doesn't support infinity propfind,
// we need to do a bfs here
const q = new Queue([`/${this.remoteBaseDir}`]);
const CHUNK_SIZE = 10;
while (q.length > 0) {
const itemsToFetch: string[] = [];
while (q.length > 0) {
itemsToFetch.push(q.pop()!);
}
const itemsToFetchChunks = chunk(itemsToFetch, CHUNK_SIZE);
// console.debug(itemsToFetchChunks);
const subContents = [] as FileStat[];
for (const singleChunk of itemsToFetchChunks) {
const r = singleChunk.map((x) => {
return this.client.getDirectoryContents(x, {
deep: false,
details: false /* no need for verbose details here */,
// TODO: to support .obsidian,
// we need to load all files including dot,
// anyway to reduce the resources?
// glob: "/**" /* avoid dot files by using glob */,
}) as Promise<FileStat[]>;
});
const r3 = await Promise.all(r);
for (const r4 of r3) {
if (
this.webdavConfig.address.includes("jianguoyun.com") &&
r4.length >= 749
) {
// https://help.jianguoyun.com/?p=2064
// no more than 750 per request
throw Error(
`Error: the Jianguoyun api has a limit, and the file list was not fully loaded. Aborting sync!`
);
}
}
const r2 = flatten(r3);
subContents.push(...r2);
}
for (let i = 0; i < subContents.length; ++i) {
const f = subContents[i];
contents.push(f);
if (f.type === "directory") {
q.push(f.filename);
}
}
}
} else {
// the remote supports infinity propfind
contents = (await this.client.getDirectoryContents(
`/${this.remoteBaseDir}`,
{
deep: true,
details: false /* no need for verbose details here */,
// TODO: to support .obsidian,
// we need to load all files including dot,
// anyway to reduce the resources?
// glob: "/**" /* avoid dot files by using glob */,
}
)) as FileStat[];
}
return contents.map((x) => fromWebdavItemToEntity(x, this.remoteBaseDir));
}
async walkPartial(): Promise<Entity[]> {
await this._init();
const contents = (await this.client.getDirectoryContents(
`/${this.remoteBaseDir}`,
{
deep: false, // partial, no need to recursive here
details: false /* no need for verbose details here */,
}
)) as FileStat[];
return contents.map((x) => fromWebdavItemToEntity(x, this.remoteBaseDir));
}
async stat(key: string): Promise<Entity> {
await this._init();
const fullPath = getWebdavPath(key, this.remoteBaseDir);
return await this._statFromRoot(fullPath);
}
async _statFromRoot(key: string): Promise<Entity> {
const res = (await this.client.stat(key, {
details: false,
})) as FileStat;
return fromWebdavItemToEntity(res, this.remoteBaseDir);
}
async mkdir(key: string, mtime?: number, ctime?: number): Promise<Entity> {
if (!key.endsWith("/")) {
throw Error(`you should not call mkdir on ${key}`);
}
await this._init();
const uploadFile = getWebdavPath(key, this.remoteBaseDir);
return await this._mkdirFromRoot(uploadFile, mtime, ctime);
}
async _mkdirFromRoot(
key: string,
mtime?: number,
ctime?: number
): Promise<Entity> {
// the sync algorithm should do recursive manually already.
// if we set recursive: true here, Digest auth will return some error inside the PROPFIND
await this.client.createDirectory(key, {
recursive: false,
});
return await this._statFromRoot(key);
}
async writeFile(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity> {
if (key.endsWith("/")) {
throw Error(`you should not call writeFile on ${key}`);
}
await this._init();
const uploadFile = getWebdavPath(key, this.remoteBaseDir);
return await this._writeFileFromRoot(
uploadFile,
content,
mtime,
ctime,
key
);
}
async _writeFileFromRoot(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number,
origKey: string
): Promise<Entity> {
// less than 10 MB
if (content.byteLength <= 10 * 1024 * 1024) {
return await this._writeFileFromRootFull(
key,
content,
mtime,
ctime,
origKey
);
}
// larger than 10 MB
if (!this.isNextcloud && !this.supportNativePartial) {
// give up and upload by whole, and directly return
return await this._writeFileFromRootFull(
key,
content,
mtime,
ctime,
origKey
);
}
// try to upload by chunks
try {
if (this.isNextcloud) {
return await this._writeFileFromRootNextcloud(
key,
content,
mtime,
ctime,
origKey
);
} else if (this.supportNativePartial) {
return await this._writeFileFromRootNativePartial(
key,
content,
mtime,
ctime,
origKey
);
}
throw Error(`Error: partial upload / update method is not implemented??`);
} catch (e) {
console.error(
`we failed to write the file partially for nextcloud or apache or sabre/dav, stopping!`
);
console.error(e);
throw e;
// this.isNextcloud = false;
// this.supportNativePartial = false;
// return await this._writeFileFromRootFull(
// key,
// content,
// mtime,
// ctime,
// origKey
// );
}
}
async _writeFileFromRootFull(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number,
origKey: string
): Promise<Entity> {
// console.debug(`start _writeFileFromRootFull`);
await this.client.putFileContents(key, content, {
overwrite: true,
onUploadProgress: (progress: any) => {
console.info(`Uploaded ${progress.loaded} bytes of ${progress.total}`);
},
});
const k = await this._statFromRoot(key);
// console.debug(`end _writeFileFromRootFull`);
return k;
}
/**
* https://docs.nextcloud.com/server/latest/developer_manual/client_apis/WebDAV/chunking.html
* @param key
* @param content
* @param mtime
* @param ctime
* @returns
*/
async _writeFileFromRootNextcloud(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number,
origKey: string
): Promise<Entity> {
if (key.endsWith("/")) {
throw Error(
`key=${key} should not have a trailing slash in _writeFileFromRootNextcloud`
);
}
const destUrl = `${this.webdavConfig.address}/${encodeURI(key)}`;
console.debug(`destUrl=${destUrl}`);
const getTmpFolder = (x: string) => {
if (x.endsWith("/")) {
throw Error(`file to upload by chunks should not end with /`);
}
const y = x.split("/");
const z = encodeURI(`${y[y.length - 1]}`);
return z;
};
const uploadServerAddress = this.nextcloudUploadServerAddress;
console.debug(`uploadServerAddress=${uploadServerAddress}`);
const tmpFolderName = getTmpFolder(key);
console.debug(`tmpFolderName=${tmpFolderName}`);
const clientForUpload = createClient(uploadServerAddress, {
username: this.webdavConfig.username,
password: this.webdavConfig.password,
headers: {
"Cache-Control": "no-cache",
},
authType:
this.webdavConfig.authType === "digest"
? AuthType.Digest
: AuthType.Password,
});
// create folder
await clientForUpload.createDirectory(tmpFolderName, {
method: "MKCOL",
headers: {
Destination: destUrl,
},
});
console.debug(`finish creating folder`);
// upload by chunks
const sizePerChunk = 5 * 1024 * 1024; // 5 mb
const chunkRanges = splitFileSizeToChunkRanges(
content.byteLength,
sizePerChunk
);
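// e.g. (assuming splitFileSizeToChunkRanges returns inclusive ranges):
// a 12 MiB file with 5 MiB chunks yields three ranges covering bytes
// [0, 5MiB-1], [5MiB, 10MiB-1] and [10MiB, 12MiB-1], uploaded as the
// temporary files "00001", "00002" and "00003".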
for (let i = 0; i < chunkRanges.length; ++i) {
const { start, end } = chunkRanges[i];
const tmpFileName = `${i + 1}`.padStart(5, "0");
const tmpFileNameWithFolder = `${tmpFolderName}/${tmpFileName}`;
console.debug(
`start to upload chunk ${
i + 1
} to ${tmpFileNameWithFolder} with startInclusive=${start}, endInclusive=${end}`
);
await clientForUpload.putFileContents(
tmpFileNameWithFolder,
content.slice(start, end + 1),
{
headers: {
Destination: destUrl,
"OC-Total-Length": `${content.byteLength}`,
},
}
);
}
console.debug(`finish upload all chunks`);
// move to assemble
const fakeFileToMoveUrl = `${tmpFolderName}/.file`;
console.debug(`fakeFileToMoveUrl=${fakeFileToMoveUrl}`);
await clientForUpload.customRequest(fakeFileToMoveUrl, {
method: "MOVE",
headers: {
Destination: destUrl,
"OC-Total-Length": `${content.byteLength}`,
},
});
console.debug(`finish moving file`);
// TODO: setting X-OC-Mtime
// stat
console.debug(`before stat origKey=${origKey}`);
const k = await this.stat(origKey);
console.debug(`after stat`);
if (k.sizeRaw !== content.byteLength) {
// we failed!
this.isNextcloud = false; // give up next time!
const err = `unable to upload file ${key} by chunks to nextcloud`;
console.error(err);
throw Error(err);
}
console.debug(`after stat, k=${JSON.stringify(k, null, 2)}`);
return k;
}
async _writeFileFromRootNativePartial(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number,
origKey: string
): Promise<Entity> {
// firstly upload a 0-byte data
await this._writeFileFromRootFull(
key,
new ArrayBuffer(0),
mtime,
ctime,
origKey
);
// then "update" by chunks
const sizePerChunk = 5 * 1024 * 1024; // 5 mb
const chunkRanges = splitFileSizeToChunkRanges(
content.byteLength,
sizePerChunk
);
for (let i = 0; i < chunkRanges.length; ++i) {
const { start, end } = chunkRanges[i];
await this.client.partialUpdateFileContents(
key,
start,
end,
content.slice(start, end + 1)
);
}
// lastly return
return await this.stat(origKey);
}
async readFile(key: string): Promise<ArrayBuffer> {
if (key.endsWith("/")) {
throw Error(`you should not call readFile on ${key}`);
}
await this._init();
const downloadFile = getWebdavPath(key, this.remoteBaseDir);
return await this._readFileFromRoot(downloadFile);
}
async _readFileFromRoot(key: string): Promise<ArrayBuffer> {
const buff = (await this.client.getFileContents(key)) as BufferLike;
if (buff instanceof ArrayBuffer) {
return buff;
} else if (buff instanceof Buffer) {
return bufferToArrayBuffer(buff);
}
throw Error(`unexpected file content result with type ${typeof buff}`);
}
async rename(key1: string, key2: string): Promise<void> {
if (key1 === "/" || key2 === "/") {
return;
}
const remoteFileName1 = getWebdavPath(key1, this.remoteBaseDir);
const remoteFileName2 = getWebdavPath(key2, this.remoteBaseDir);
await this._init();
await this.client.moveFile(remoteFileName1, remoteFileName2);
}
async rm(key: string): Promise<void> {
if (key === "/") {
return;
}
await this._init();
try {
const remoteFileName = getWebdavPath(key, this.remoteBaseDir);
await this.client.deleteFile(remoteFileName);
// console.info(`delete ${remoteFileName} succeeded`);
} catch (err) {
console.error("some error while deleting");
console.error(err);
}
}
async checkConnect(callbackFunc?: any): Promise<boolean> {
if (
!(
this.webdavConfig.address.startsWith("http://") ||
this.webdavConfig.address.startsWith("https://")
)
) {
const err =
"Error: the url should start with http(s):// but it does not!";
console.error(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
}
return false;
}
try {
await this._init();
const results = await this._statFromRoot(`/${this.remoteBaseDir}/`);
if (results === undefined) {
const err = "results is undefined";
console.error(err);
callbackFunc?.(err);
return false;
}
return true;
} catch (err) {
console.error(err);
callbackFunc?.(err);
return false;
}
}
async getUserDisplayName(): Promise<string> {
throw new Error("Method not implemented.");
}
async revokeAuth() {
throw new Error("Method not implemented.");
}
allowEmptyFile(): boolean {
return true;
}
}

274
src/fsWebdis.ts Normal file
View File

@ -0,0 +1,274 @@
import { isEqual } from "lodash";
import {
DEFAULT_CONTENT_TYPE,
type Entity,
type WebdisConfig,
} from "./baseTypes";
import { FakeFs } from "./fsAll";
export const DEFAULT_WEBDIS_CONFIG: WebdisConfig = {
address: "",
username: "",
password: "",
remoteBaseDir: "",
};
const getWebdisPath = (fileOrFolderPath: string, remoteBaseDir: string) => {
let key = fileOrFolderPath;
if (fileOrFolderPath === "/" || fileOrFolderPath === "") {
// special
key = `${remoteBaseDir}`;
} else if (fileOrFolderPath.startsWith("/")) {
console.warn(
`why does the path ${fileOrFolderPath} start with '/'? we just go on anyway.`
);
key = `${remoteBaseDir}${fileOrFolderPath}`;
} else {
key = `${remoteBaseDir}/${fileOrFolderPath}`;
}
return `rs:fs:v1:${encodeURIComponent(key)}`; // we should encode them!!!!
};
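// Example of the key scheme (illustrative), with remoteBaseDir = "myvault":
//   getWebdisPath("note.md", "myvault") === "rs:fs:v1:myvault%2Fnote.md"
// and the callers then append the ":meta" / ":content" suffixes.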
export const getOrigPath = (fullKey: string, remoteBaseDir: string) => {
const fullKeyDecoded = decodeURIComponent(fullKey);
const prefix = `rs:fs:v1:${remoteBaseDir}/`;
// console.debug(`prefix=${prefix}`);
const suffix1 = ":meta";
const suffix2 = ":content";
if (!fullKeyDecoded.startsWith(prefix)) {
throw Error(`you should not call getOrigPath on ${fullKey}`);
}
let realKey = fullKeyDecoded.slice(prefix.length);
// console.debug(`realKey=${realKey}`);
if (realKey.endsWith(suffix1)) {
realKey = realKey.slice(0, -suffix1.length);
// console.debug(`realKey=${realKey}`);
} else if (realKey.endsWith(suffix2)) {
realKey = realKey.slice(0, -suffix2.length);
// console.debug(`realKey=${realKey}`);
}
// console.debug(`fullKey=${fullKey}, realKey=${realKey}`);
return realKey;
};
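// Round-trip example (illustrative):
//   getOrigPath("rs:fs:v1:myvault%2Fnote.md:meta", "myvault") === "note.md"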
export class FakeFsWebdis extends FakeFs {
kind: "webdis";
webdisConfig: WebdisConfig;
remoteBaseDir: string;
saveUpdatedConfigFunc: () => Promise<any>;
constructor(
webdisConfig: WebdisConfig,
vaultName: string,
saveUpdatedConfigFunc: () => Promise<any>
) {
super();
this.kind = "webdis";
this.webdisConfig = webdisConfig;
this.remoteBaseDir = this.webdisConfig.remoteBaseDir || vaultName || "";
this.saveUpdatedConfigFunc = saveUpdatedConfigFunc;
}
async _fetchCommand(
method: "GET" | "POST" | "PUT",
urlPath: string,
content?: ArrayBuffer
) {
const address = this.webdisConfig.address;
if (!address.startsWith(`https://`) && !address.startsWith(`http://`)) {
throw Error(
`your webdis server address should start with https:// or http://`
);
}
if (address.endsWith("/")) {
throw Error(`your webdis server address should not end with /`);
}
if (content !== undefined && method !== "PUT") {
throw Error(`you can only "POST" ArrayBuffer, not using other methods`);
}
const fullUrl = `${address}/${urlPath}`;
// console.debug(`fullUrl=${fullUrl}`)
const username = this.webdisConfig.username ?? "";
const password = this.webdisConfig.password ?? "";
if (username !== "" && password !== "") {
return await fetch(fullUrl, {
method: method,
headers: {
Authorization: "Basic " + btoa(username + ":" + password),
},
body: content,
});
} else if (username === "" && password === "") {
return await fetch(fullUrl, {
method: method,
body: content,
});
} else {
throw Error(
`your username and password should both be empty or not empty!`
);
}
}
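// Usage sketch: a command is just a Webdis url path, e.g.
//   await this._fetchCommand("GET", "PING/helloworld")
// requests `${address}/PING/helloworld` with optional basic auth attached.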
async walk(): Promise<Entity[]> {
let cursor = "0";
const res: Entity[] = [];
do {
const command = `SCAN/${cursor}/MATCH/rs:fs:v1:*:meta/COUNT/1000`;
const rsp = (await (await this._fetchCommand("GET", command)).json())[
"SCAN"
];
// console.debug(rsp);
cursor = rsp[0];
for (const fullKeyWithMeta of rsp[1]) {
const realKey = getOrigPath(fullKeyWithMeta, this.remoteBaseDir);
res.push(await this.stat(realKey));
}
} while (cursor !== "0");
// console.debug(`walk res:`);
// console.debug(res);
return res;
}
async walkPartial(): Promise<Entity[]> {
let cursor = "0";
const res: Entity[] = [];
const command = `SCAN/${cursor}/MATCH/rs:fs:v1:*:meta/COUNT/10`; // fewer keys
const rsp = (await (await this._fetchCommand("GET", command)).json())[
"SCAN"
];
// console.debug(rsp);
cursor = rsp[0];
for (const fullKeyWithMeta of rsp[1]) {
const realKey = getOrigPath(fullKeyWithMeta, this.remoteBaseDir);
res.push(await this.stat(realKey));
}
// no need to loop over cursor
// console.debug(`walk res:`);
// console.debug(res);
return res;
}
async stat(key: string): Promise<Entity> {
const fullKey = getWebdisPath(key, this.remoteBaseDir);
return await this._statFromRaw(fullKey);
}
async _statFromRaw(key: string): Promise<Entity> {
// console.debug(`_statFromRaw on ${key}`);
const command = `HGETALL/${key}:meta`;
const rsp = (await (await this._fetchCommand("GET", command)).json())[
"HGETALL"
];
// console.debug(`rsp: ${JSON.stringify(rsp, null, 2)}`);
if (isEqual(rsp, {})) {
// empty!
throw Error(`key ${key} doesn't exist!`);
}
const realKey = getOrigPath(key, this.remoteBaseDir);
return {
key: realKey,
keyRaw: realKey,
mtimeCli: Number.parseInt(rsp["mtime"]),
mtimeSvr: Number.parseInt(rsp["mtime"]),
size: Number.parseInt(rsp["size"]),
sizeRaw: Number.parseInt(rsp["size"]),
};
}
async mkdir(key: string, mtime?: number, ctime?: number): Promise<Entity> {
let command = `HSET/${getWebdisPath(key, this.remoteBaseDir)}:meta/size/0`;
if (mtime !== undefined && mtime !== 0) {
command = `${command}/mtime/${mtime}`;
}
if (ctime !== undefined && ctime !== 0) {
command = `${command}/ctime/${ctime}`;
}
const rsp = (await (await this._fetchCommand("GET", command)).json())[
"HSET"
];
return await this.stat(key);
}
async writeFile(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity> {
const fullKey = getWebdisPath(key, this.remoteBaseDir);
// meta
let command1 = `HSET/${fullKey}:meta/size/${content.byteLength}`;
if (mtime !== undefined && mtime !== 0) {
command1 = `${command1}/mtime/${mtime}`;
}
if (ctime !== undefined && ctime !== 0) {
command1 = `${command1}/ctime/${ctime}`;
}
const rsp1 = (await (await this._fetchCommand("GET", command1)).json())[
"HSET"
];
// content
const command2 = `SET/${fullKey}:content`;
const rsp2 = (
await (await this._fetchCommand("PUT", command2, content)).json()
)["SET"];
// fetch meta
return await this.stat(key);
}
async readFile(key: string): Promise<ArrayBuffer> {
const fullKey = getWebdisPath(key, this.remoteBaseDir);
const command = `GET/${fullKey}:content?type=${DEFAULT_CONTENT_TYPE}`;
const rsp = await (await this._fetchCommand("GET", command)).arrayBuffer();
return rsp;
}
async rename(key1: string, key2: string): Promise<void> {
const fullKey1 = getWebdisPath(key1, this.remoteBaseDir);
const fullKey2 = getWebdisPath(key2, this.remoteBaseDir);
const commandContent = `RENAME/${fullKey1}:content/${fullKey2}:content`;
await this._fetchCommand("POST", commandContent);
const commandMeta = `RENAME/${fullKey1}:meta/${fullKey2}:meta`;
await this._fetchCommand("POST", commandMeta);
}
async rm(key: string): Promise<void> {
const fullKey = getWebdisPath(key, this.remoteBaseDir);
const command = `DEL/${fullKey}:meta/${fullKey}:content`;
const rsp = (await (await this._fetchCommand("PUT", command)).json())[
"DEL"
];
}
async checkConnect(callbackFunc?: any): Promise<boolean> {
try {
const k = await (
await this._fetchCommand("GET", "PING/helloworld")
).json();
return isEqual(k, { PING: "helloworld" });
} catch (err: any) {
console.error(err);
callbackFunc?.(err);
return false;
}
}
async getUserDisplayName(): Promise<string> {
return this.webdisConfig.username || "<no username>";
}
async revokeAuth(): Promise<any> {
throw new Error("Method not implemented.");
}
allowEmptyFile(): boolean {
return true;
}
}

View File

@ -1,7 +1,11 @@
import merge from "lodash/merge";
import Mustache from "mustache";
import { moment } from "obsidian";
import { LANGS } from "./langs";
import { LANGS as LANGS_PRO } from "../pro/src/langs";
import { LANGS as LANGS_BASIC } from "./langs";
const LANGS = merge(LANGS_BASIC, LANGS_PRO);
export type LangType = keyof typeof LANGS;
export type LangTypeAndAuto = LangType | "auto";

View File

@ -1,13 +1,13 @@
import QRCode from "qrcode";
import cloneDeep from "lodash/cloneDeep";
import QRCode from "qrcode";
import {
COMMAND_URI,
UriParams,
RemotelySavePluginSettings,
QRExportType,
type QRExportType,
type RemotelySavePluginSettings,
type UriParams,
} from "./baseTypes";
import { getShrinkedSettings } from "./remoteForOnedrive";
import { getShrinkedSettings } from "./fsOnedrive";
export const exportQrCodeUri = async (
settings: RemotelySavePluginSettings,
@ -17,14 +17,27 @@ export const exportQrCodeUri = async (
) => {
let settings2: Partial<RemotelySavePluginSettings> = {};
if (exportFields === "all_but_oauth2") {
if (exportFields === "basic_and_advanced") {
settings2 = cloneDeep(settings);
delete settings2.s3;
delete settings2.dropbox;
delete settings2.onedrive;
delete settings2.webdav;
delete settings2.webdis;
delete settings2.googledrive;
delete settings2.pro;
} else if (exportFields === "s3") {
settings2 = { s3: cloneDeep(settings.s3) };
} else if (exportFields === "dropbox") {
settings2 = { dropbox: cloneDeep(settings.dropbox) };
} else if (exportFields === "onedrive") {
settings2 = { onedrive: getShrinkedSettings(settings.onedrive) };
} else if (exportFields === "webdav") {
settings2 = { webdav: cloneDeep(settings.webdav) };
} else if (exportFields === "webdis") {
settings2 = { webdis: cloneDeep(settings.webdis) };
} else if (exportFields === "googledrive") {
settings2 = { googledrive: cloneDeep(settings.googledrive) };
}
delete settings2.vaultRandomID;
@ -64,7 +77,7 @@ export const importQrCodeUri = (
inputParams: any,
currentVaultName: string
): ProcessQrCodeResultType => {
let params = inputParams as UriParams;
const params = inputParams as UriParams;
if (
params.func === undefined ||
params.func !== "settings" ||

View File

@ -9,9 +9,9 @@
"syncrun_syncingribbon": "{{pluginName}}: syncing from {{triggerSource}}",
"syncrun_step0": "0/8 Remotely Save is running in dry mode, thus not actual file changes would happen.",
"syncrun_step1": "1/8 Remotely Save is preparing ({{serviceType}})",
"syncrun_step2": "2/8 Starting to fetch remote meta data.",
"syncrun_step3": "3/8 Checking password correct or not.",
"syncrun_step2": "2/8 Starting to fetch remote meta data and check password.",
"syncrun_passworderr": "Something goes wrong while checking password.",
"syncrun_step3": "3/8 Starting to process remote data.",
"syncrun_step4": "4/8 Starting to fetch local meta data.",
"syncrun_step5": "5/8 Starting to fetch local prev sync data.",
"syncrun_step6": "6/8 Starting to generate sync plan.",
@ -38,6 +38,7 @@
"protocol_onedrive_connect_unknown": "Do not know how to deal with the callback: {{params}}",
"command_startsync": "start sync",
"command_drynrun": "start sync (dry run only)",
"command_exportsyncplans_1_only_change": "export sync plans (latest 1) (change part)",
"command_exportsyncplans_1": "export sync plans (latest 1)",
"command_exportsyncplans_5": "export sync plans (latest 5)",
"command_exportsyncplans_all": "export sync plans (all)",
@ -50,14 +51,14 @@
"statusbar_time_days": "Synced {{time}} days ago",
"statusbar_time_hours": "Synced {{time}} hours ago",
"statusbar_time_minutes": "Synced {{time}} minutes ago",
"statusbar_time_lessminute": "Synced less than a minute ago",
"statusbar_time_lessminute": "Synced last minute ago",
"statusbar_lastsync": "Synced {{time}} ago",
"statusbar_syncing": "Syncing...",
"statusbar_failed": "Last sync failed",
"statusbar_now": "Synced just now",
"statusbar_lastsync_label": "Last successful Sync on {{date}}",
"statusbar_lastsync_never": "Never Synced",
"statusbar_lastsync_never_label": "Never Synced before",
"modal_password_title": "Hold on and PLEASE READ ON...",
"modal_password_shortdesc": "If the field is not empty, files would be encrypted locally before being uploaded.\nIf the field is empty, then files would be uploaded without encryption.",
"modal_password_attn1": "Attention 1/5: The vault name is NOT encrypted. The plugin creates a folder with the vault name on some remote services.",
@ -115,9 +116,9 @@
"modal_sizesconflict_copynotice": "All the sizes conflicts info have been copied to the clipboard!",
"settings_basic": "Basic Settings",
"settings_password": "Encryption Password",
"settings_password_desc": "Password for E2E encryption. Empty for no password. You need to click \"Confirm\". Attention: The password and other info are saved locally. After changing the password, you need to manually delete every original files in the remote, and re-sync (so that upload) the encrypted files again.",
"settings_password_desc": "Password for E2E encryption. Empty for no password. You need to click \"Confirm\". Attention: The password and other info are saved locally. After changing the password, you need to manually delete all files from the remote location and re-sync to upload the encrypted files again.",
"settings_encryptionmethod": "Encryption Method",
"settings_encryptionmethod_desc": "Encryption method for E2E encryption. RClone Crypt format is recommended but it doesn't encrypt path structure. OpenSSL enc is the legacy format of this plugin. <b>Both are not affliated with official RClone and OpenSSL product or community.</b> Attention: After switching the method, you need to manually delete every original files in the remote and re-sync (so that upload) the encrypted files again. More info in the <a href='https://github.com/remotely-save/remotely-save/tree/master/docs/encryption'>online doc</a>.",
"settings_encryptionmethod_desc": "Encryption method for E2E encryption. The RClone Crypt format is recommended, although it does not encrypt the path structure. OpenSSL enc is the legacy format of this plugin. <b>Both are not affliated with the official RClone and OpenSSL products or communities.</b> Attention: After switching encryption methods, you need to manually delete all original files from the remote location and re-sync to upload the encrypted files again. More info is available in the <a href='https://github.com/remotely-save/remotely-save/tree/master/docs/encryption'>online doc</a>.",
"settings_encryptionmethod_rclone": "RClone Crypt (recommended)",
"settings_encryptionmethod_openssl": "OpenSSL enc (legacy)",
@ -129,22 +130,18 @@
"settings_autorun_10min": "every 10 minutes",
"settings_autorun_30min": "every 30 minutes",
"settings_runoncestartup": "Run Once On Start Up Automatically",
"settings_runoncestartup_desc": "This settings allows setting running ONCE on start up automatically. This will take effect on NEXT start up after changing. This setting, is different from \"schedule for auto run\" which starts syncing after EVERY interval.",
"settings_runoncestartup_desc": "This setting causes the sync to run once automatically at startup. Changes will take effect on the NEXT startup. This setting is different from the \"schedule for auto run\" setting, which initiates syncing after every specified interval.",
"settings_runoncestartup_notset": "(not set)",
"settings_runoncestartup_1sec": "sync once after 1 second of start up",
"settings_runoncestartup_10sec": "sync once after 10 seconds of start up",
"settings_runoncestartup_30sec": "sync once after 30 seconds of start up",
"settings_saverun": "Sync On Save (experimental)",
"settings_saverun_desc": "A sync will be triggered if a file save action happened within a few seconds. Please pay attention that syncing is potentially a heavy action and battery may be impacted. (May need to reload the plugin or restart Obsidian after changing)",
"settings_saverun_notset": "(not set)",
"settings_saverun_1sec": "check every 1 second",
"settings_saverun_5sec": "check every 5 seconds",
"settings_saverun_10sec": "check every 10 seconds (recommended)",
"settings_saverun_1min": "check every 1 minute",
"settings_synconsave": "Sync On Save (experimental)",
"settings_synconsave_desc": "If you change your file, the plugin tries to trigger a sync.",
"settings_synconsave_disable": "Disable (default)",
"settings_synconsave_enable": "Enable",
"settings_skiplargefiles": "Skip Large Files",
"settings_skiplargefiles_desc": "Skip files with sizes larger than the threshold. Here 1 MB = 10^6 bytes.",
"settings_skiplargefiles_notset": "(not set)",
"settings_ignorepaths": "Regex Of Paths To Ignore",
"settings_ignorepaths_desc": "Regex of paths of folders or files to ignore. One regex per line. The path is relative to the vault root without leading slash.",
"settings_enablestatusbar_info": "Show Last Successful Sync In Status Bar",
@ -154,7 +151,6 @@
"settings_resetstatusbar_time_desc": "Reset last successful sync time.",
"settings_resetstatusbar_button": "Reset",
"settings_resetstatusbar_notice": "Reset done!",
"settings_checkonnectivity": "Check Connectivity",
"settings_checkonnectivity_desc": "Check connectivity.",
"settings_checkonnectivity_button": "Check",
@ -187,6 +183,12 @@
"settings_s3_accuratemtime_desc": "Read the uploaded accurate last modified time for better sync algorithm. But it causes extra api requests / time / money to the S3 endpoint.",
"settings_s3_urlstyle": "S3 URL style",
"settings_s3_urlstyle_desc": "Whether to force path-style URLs for S3 objects (e.g., https://s3.amazonaws.com/*/ instead of https://*.s3.amazonaws.com/).",
"settings_s3_reverse_proxy_no_sign_url": "S3 Reverse Proxy (No Sign) Url (experimental)",
"settings_s3_reverse_proxy_no_sign_url_desc": "S3 reverse proxy url without signature. This is useful if you use a revers proxy but do not change the original credential signature. No http(s):// prefix. Leave it blank if you don't know what it is.",
"settings_s3_generatefolderobject": "Generate Folder Object Or Not",
"settings_s3_generatefolderobject_desc": "S3 doesn't have \"real\" folder. If you set \"Generate\" here (or use old version), the plugin will upload a zero-byte object endding with \"/\" to represent the folder. In the new version, the plugin skips generating folder object by default.",
"settings_s3_generatefolderobject_notgenerate": "Not generate (default)",
"settings_s3_generatefolderobject_generate": "Generate",
"settings_s3_connect_succ": "Great! The bucket can be accessed.",
"settings_s3_connect_fail": "The S3 bucket cannot be reached.",
"settings_dropbox": "Remote For Dropbox",
@ -218,8 +220,12 @@
"settings_onedrive_auth": "Auth",
"settings_onedrive_auth_desc": "Auth.",
"settings_onedrive_auth_button": "Auth",
"settings_onedrive_connect_succ": "Great! We can connect to Onedrive!",
"settings_onedrive_connect_fail": "We cannot connect to Onedrive.",
"settings_onedrive_connect_succ": "Great! We can connect to OneDrive!",
"settings_onedrive_connect_fail": "We cannot connect to OneDrive.",
"settings_onedrive_emptyfile": "Empty File Handling",
"settings_onedrive_emptyfile_desc": "OneDrive doesn't allow uploading empty file (even in its official website). Do you want to show up errors or silently skip the empty files?",
"settings_onedrive_emptyfile_skip": "Skip",
"settings_onedrive_emptyfile_error": "Error and abort",
"settings_webdav": "Remote For Webdav",
"settings_webdav_disclaimer1": "Disclaimer: The information is stored locally. Other malicious/harmful/faulty plugins may read the info. If you see any unintentional access to your webdav server, please immediately change the username and password.",
"settings_webdav_cors_os": "Obsidian desktop>=0.13.25 or iOS>=1.1.1 or Android>=1.2.1 supports bypassing CORS locally. But you are using an old version, and you're suggested to upgrade Obsidian.",
@ -232,7 +238,7 @@
"settings_webdav_password": "Password",
"settings_webdav_password_desc": "Password. Attention: the password and other info are saved locally.",
"settings_webdav_auth": "Auth Type",
"settings_webdav_auth_desc": "If no password, this option would be ignored.",
"settings_webdav_auth_desc": "If no password is provided, this option will be ignored.",
"settings_webdav_depth": "Depth Header Sent To Servers",
"settings_webdav_depth_desc": "Webdav servers should be configured to allow requests with header Depth being '1' or 'Infinity'. If you are not sure what's this, choose \"depth='1'\". If you are sure your server supports depth='infinity', please choose that to get way better performance.",
"settings_webdav_depth_1": "only supports depth='1'",
@ -240,12 +246,25 @@
"settings_webdav_connect_succ": "Great! The webdav server can be accessed.",
"settings_webdav_connect_fail": "The webdav server cannot be reached (possible to be any of address/username/password/authtype errors).",
"settings_webdav_connect_fail_withcors": "The webdav server cannot be reached (possible to be any of address/username/password/authtype/CORS errors).",
"settings_webdis": "Remote For Webdis",
"settings_webdis_disclaimer1": "Disclaimer: This app is NOT an official Redis® Ltd / Redis® OSS / Webdis product. Redis is a registered trademark of Redis Ltd.",
"settings_webdis_disclaimer2": "Disclaimer: The information is stored locally. Other malicious/harmful/faulty plugins may read the info. If you see any unintentional access to your Webdis server, please immediately change the username and password.",
"settings_webdis_folder": "We will store the value with keys prefixed by :{{remoteBaseDir}} on your server.",
"settings_webdis_addr": "Server Address",
"settings_webdis_addr_desc": "Server address.",
"settings_webdis_user": "Username",
"settings_webdis_user_desc": "Username. Attention: the username and other info are saved locally.",
"settings_webdis_password": "Password",
"settings_webdis_password_desc": "Password. Attention: the password and other info are saved locally.",
"settings_webdis_connect_succ": "Great! The Webdis server can be accessed.",
"settings_webdis_connect_fail": "The Webdis server cannot be reached (possible to be any of address/username/password errors).",
"settings_chooseservice": "Choose A Remote Service",
"settings_chooseservice_desc": "Start here. What service are you connecting to? S3, Dropbox, Webdav, or OneDrive for personal?",
"settings_chooseservice_desc": "Start here. What service are you connecting to? S3, Dropbox, Webdav, OneDrive for personal, or Webdis?",
"settings_chooseservice_s3": "S3 or compatible",
"settings_chooseservice_dropbox": "Dropbox",
"settings_chooseservice_webdav": "Webdav",
"settings_chooseservice_onedrive": "OneDrive for personal",
"settings_chooseservice_webdis": "Webdis (HTTP for Redis®)",
"settings_adv": "Advanced Settings",
"settings_concurrency": "Concurrency",
"settings_concurrency_desc": "How many files do you want to download or upload in parallel at most? By default it's set to 5. If you meet any problems such as rate limit, you can reduce the concurrency to a lower value.",
@ -258,7 +277,7 @@
"settings_deletetowhere_system_trash": "system trash (default)",
"settings_deletetowhere_obsidian_trash": "Obsidian .trash folder",
"settings_conflictaction": "Action For Conflict",
"settings_conflictaction_desc": "If a file is created or modified on both side since last update, it's a conflict event. How to deal with it? This only works for bidirectional sync.",
"settings_conflictaction_desc": "<p>If a file is created or modified on both side since last update, it's a conflict event. How to deal with it? This only works for bidirectional sync.</p>",
"settings_conflictaction_keep_newer": "newer version survives (default)",
"settings_conflictaction_keep_larger": "larger size version survives",
"settings_cleanemptyfolder": "Action For Empty Folders",
@ -270,6 +289,9 @@
"settings_protectmodifypercentage_000_desc": "0 (always block)",
"settings_protectmodifypercentage_050_desc": "50 (default)",
"settings_protectmodifypercentage_100_desc": "100 (disable the protection)",
"settings_protectmodifypercentage_custom_desc": "custom",
"settings_protectmodifypercentage_customfield": "Custom Abort Sync If Modification Above Percentage",
"settings_protectmodifypercentage_customfield_desc": "You need to enter a number between 0 (inclusive) and 100 (inclusive). Float number is also allowed.",
"setting_syncdirection": "Sync Direction",
"setting_syncdirection_desc": "Which direction should the plugin sync to? Please be aware that only CHANGED files (based on time and size) are synced regardless any option.",
"setting_syncdirection_bidirectional_desc": "Bidirectional (default)",
@ -280,9 +302,12 @@
"settings_importexport": "Import and Export Partial Settings",
"settings_export": "Export",
"settings_export_desc": "Export settings by generating a QR code or URI.",
"settings_export_all_but_oauth2_button": "Export Non-Oauth2 Part",
"settings_export_basic_and_advanced_button": "Export Basic And Advanced Part",
"settings_export_s3_button": "Export S3 Part",
"settings_export_dropbox_button": "Export Dropbox Part",
"settings_export_onedrive_button": "Export OneDrive Part",
"settings_export_webdav_button": "Export Webdav Part",
"settings_export_webdis_button": "Export Webdis Part",
"settings_import": "Import",
"settings_import_desc": "Paste the exported URI into here and click \"Import\". Or, you can open a camera or scan-qrcode app to scan the QR code.",
"settings_import_button": "Import",
@ -300,6 +325,7 @@
"settings_viewconsolelog_desc": "On desktop, please press \"ctrl+shift+i\" or \"cmd+shift+i\" to view the log. On mobile, please install the third-party plugin <a href='https://obsidian.md/plugins?search=Logstravaganza'>Logstravaganza</a> to export the console log to a note.",
"settings_syncplans": "Export Sync Plans",
"settings_syncplans_desc": "Sync plans are created every time after you trigger sync and before the actual sync. Useful to know what would actually happen in those sync. Click the button to export sync plans.",
"settings_syncplans_button_1_only_change": "Export latest 1 (change part)",
"settings_syncplans_button_1": "Export latest 1",
"settings_syncplans_button_5": "Export latest 5",
"settings_syncplans_button_all": "Export All",
@ -316,6 +342,10 @@
"settings_profiler_results_desc": "The plugin records the time cost of each steps. Here you can export them to know which step is slow.",
"settings_profiler_results_notice": "Profiler results exported.",
"settings_profiler_results_button_all": "Export All",
"settings_profiler_enabledebugprint": "Enable Profiler Printing",
"settings_profiler_enabledebugprint_desc": "Print profiler result in each insertion to console or not?",
"settings_profiler_recordsize": "Enable Profiler Recording Size",
"settings_profiler_recordsize_desc": "Let profiler record object sizes or not?",
"settings_outputbasepathvaultid": "Output Vault Base Path And Randomly Assigned ID",
"settings_outputbasepathvaultid_desc": "For debugging purposes.",
"settings_outputbasepathvaultid_button": "Output",
@ -323,10 +353,13 @@
"settings_resetcache_desc": "Reset local internal caches/databases (for debugging purposes). You would want to reload the plugin after resetting this. This option will not empty the {s3, password...} settings.",
"settings_resetcache_button": "Reset",
"settings_resetcache_notice": "Local internal cache/databases deleted. Please manually reload the plugin.",
"syncalgov3_title": "Remotely Save has HUGE updates on the sync algorithm",
"syncalgov3_texts": "Welcome to use Remotely Save!\nFrom this version, a new algorithm has been developed:\n<ul><li>More robust deletion sync,</li><li>minimal conflict handling,</li><li>no meta data uploaded any more,</li><li>deletion / modification protection,</li><li>backup mode</li><li>new encryption method</li><li>...</li></ul>\nStay tune for more! A full introduction is in the <a href='https://github.com/remotely-save/remotely-save/tree/master/docs/sync_algorithm/v3/intro.md'>doc website</a>.\nIf you agree to use this, please read and check two checkboxes then click the \"Agree\" button, and enjoy the plugin!\nIf you do not agree, please click the \"Do Not Agree\" button, the plugin will unload itself.\nAlso, please consider <a href='https://github.com/remotely-save/remotely-save'>visit the GitHub repo and star ⭐ it</a>! Or even <a href='https://github.com/remotely-save/donation'>buy me a coffee</a>. Your support is very important to me! Thanks!",
"syncalgov3_checkbox_manual_backup": "I will backup my vault manually firstly.",
"syncalgov3_checkbox_requiremultidevupdate": "I understand I need to update the plugin ACROSS ALL DEVICES to make them work properly.",
"syncalgov3_button_agree": "Agree",
"syncalgov3_button_disagree": "Do Not Agree"
"syncalgov3_button_disagree": "Do Not Agree",
"menu_check_file_stat": "Check file stats"
}
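The "Regex Of Paths To Ignore" setting described above takes one regular expression per line and matches it against vault-relative paths with no leading slash. A minimal sketch of that matching behavior, assuming standard JavaScript regexes (the patterns and paths are illustrative, not taken from the plugin):

const patterns: RegExp[] = [/^assets\//, /\.tmp$/];
const isIgnored = (p: string) => patterns.some((r) => r.test(p));
isIgnored("assets/logo.png"); // true: inside the assets/ folder
isIgnored("notes/daily.md"); // false: matches no pattern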

View File

@ -9,9 +9,9 @@
"syncrun_syncingribbon": "{{pluginName}}:正在由 {{triggerSource}} 触发运行",
"syncrun_step0": "0/8 Remotely Save 在空跑dry run模式不会发生实际的文件交换。",
"syncrun_step1": "1/8 Remotely Save 准备同步({{serviceType}}",
"syncrun_step2": "2/8 正在获取远端的元数据。",
"syncrun_step3": "3/8 正在检查密码正确与否。",
"syncrun_step2": "2/8 正在获取远端的元数据和检查密码。",
"syncrun_passworderr": "检查密码时候出错。",
"syncrun_step3": "3/8 正在处理远端的元数据。",
"syncrun_step4": "4/8 正在获取本地的元数据。",
"syncrun_step5": "5/8 正在获取本地上一次同步的元数据。",
"syncrun_step6": "6/8 正在生成同步计划。",
@ -39,6 +39,7 @@
"command_startsync": "开始同步start sync",
"command_drynrun": "开始同步空跑模式start sync (dry run only)",
"command_exportsyncplans_json": "导出同步计划为 json 格式export sync plans in json format",
"command_exportsyncplans_1_only_change": "导出同步计划(最近 1 次仅修改部分export sync plans (latest 1) (change part)",
"command_exportsyncplans_1": "导出同步计划(最近 1 次export sync plans (latest 1)",
"command_exportsyncplans_5": "导出同步计划(最近 5 次export sync plans (latest 5)",
"command_exportsyncplans_all": "导出同步计划所有export sync plans (all)",
@ -53,11 +54,11 @@
"statusbar_time_lessminute": "一分钟之内同步",
"statusbar_lastsync": "上一次同步于:{{time}}",
"statusbar_syncing": "正在同步",
"statusbar_failed": "上次同步失败了",
"statusbar_now": "刚同步完",
"statusbar_lastsync_label": "上一次同步于:{{date}}",
"statusbar_lastsync_never": "没触发过同步",
"statusbar_lastsync_never_label": "没触发过同步",
"modal_password_title": "稍等一下,请阅读下文:",
"modal_password_shortdesc": "如果密码不是空的,那么文件会在上传之前,在本地先用此密码加密。\n如果密码是空的那么文件会被非加密地上传。",
"modal_password_attn1": "注意 1/5Vault名字是不会加密的本插件会在一些远程存储里创建一个和库名字有着同名的文件夹。",
@ -133,17 +134,13 @@
"settings_runoncestartup_1sec": "启动后第 1 秒运行一次",
"settings_runoncestartup_10sec": "启动后第 10 秒运行一次",
"settings_runoncestartup_30sec": "启动后第 30 秒运行一次",
"settings_saverun": "保存时同步(实验性质)",
"settings_saverun_desc": "插件如果检查到当前文件在最近一段时间有修改保存过,则尝试同步。请注意,同步是一个很重的操作,因此会影响到耗电量。(修改设置后可能需要重载插件或重启。)",
"settings_saverun_notset": "(不设置)",
"settings_saverun_1sec": "隔 1 秒检查一次",
"settings_saverun_5sec": "隔 5 秒检查一次",
"settings_saverun_10sec": "隔 10 秒检查一次(推荐)",
"settings_saverun_1min": "隔 1 分钟检查一次",
"settings_synconsave": "保存时同步(实验性质)",
"settings_synconsave_desc": "插件如果检查到当前文件在最近一段时间有修改保存过,则尝试同步。请注意,同步是一个很重的操作,因此会影响到耗电量。(修改设置后可能需要重载插件或重启。)",
"settings_synconsave_disable": "关闭(默认)",
"settings_synconsave_enable": "开启",
"settings_skiplargefiles": "跳过大文件",
"settings_skiplargefiles_desc": "跳过大于某一个阈值的文件。这里 1 MB = 10^6 bytes。",
"settings_skiplargefiles_notset": "(不设置)",
"settings_ignorepaths": "忽略的文件或文件夹的正则表达式",
"settings_ignorepaths_desc": "忽略的文件或文件夹的正则表达式。每行一条。路径是相对于库Vault根目录的没有前置 / 符号。",
"settings_enablestatusbar_info": "在状态栏显示上一次成功的同步",
@ -153,7 +150,6 @@
"settings_resetstatusbar_time_desc": "重设上一次成功同步的时间记录。",
"settings_resetstatusbar_button": "重设",
"settings_resetstatusbar_notice": "重设完毕!",
"settings_checkonnectivity": "检查可否连接",
"settings_checkonnectivity_desc": "检查可否连接。",
"settings_checkonnectivity_button": "检查",
@ -186,6 +182,12 @@
"settings_s3_accuratemtime_desc": "读取(已上传的)准确的文件修改时间,有助于同步算法更加准确和稳定。但是它也会导致额外的 api 请求、时间、金钱花费。",
"settings_s3_urlstyle": "S3 URL style",
"settings_s3_urlstyle_desc": "是否对 S3 对象强制使用 path style URL例如使用 https://s3.amazonaws.com/*/ 而不是 https://*.s3.amazonaws.com/)。",
"settings_s3_reverse_proxy_no_sign_url": "S3 反向代理(不签名)地址(实验性质)",
"settings_s3_reverse_proxy_no_sign_url_desc": "不会参与到签名的 S3 反向代理地址。如果您有一个反向代理,但是不想修改原始鉴权签名,这里就可以填写。没有 http(s):// 前缀。如果您不知道这是什么,留空即可。",
"settings_s3_generatefolderobject": "是否生成文件夹 Object",
"settings_s3_generatefolderobject_desc": "S3 不存在“真正”的文件夹。如果您设置了“生成”(或用了旧版本),那么插件会上传 0 字节的以“/”结尾的 Object 来代表文件夹。新版本插件会默认跳过生成这种文件夹 Object。",
"settings_s3_generatefolderobject_notgenerate": "不生成(默认)",
"settings_s3_generatefolderobject_generate": "生成",
"settings_s3_connect_succ": "很好!可以访问到对应存储桶。",
"settings_s3_connect_fail": "无法访问到对应存储桶。",
"settings_dropbox": "Dropbox 设置",
@ -219,6 +221,10 @@
"settings_onedrive_auth_button": "鉴权",
"settings_onedrive_connect_succ": "很好!我们可连接上 OneDrive",
"settings_onedrive_connect_fail": "我们未能连接上 OneDrive。",
"settings_onedrive_emptyfile": "空文件处理",
"settings_onedrive_emptyfile_desc": "OneDrive 不允许上传空文件(即使官网也是不允许的)。那么你想跳过空文件还是返回错误?",
"settings_onedrive_emptyfile_skip": "跳过",
"settings_onedrive_emptyfile_error": "返回错误和中断",
"settings_webdav": "Webdav 设置",
"settings_webdav_disclaimer1": "声明:您所输入的信息存储于本地。其它有害的或者出错的插件,是有可能读取到这些信息的。如果您发现了 Webdav 服务器有不符合预期的访问,请立刻修改用户名和密码。",
"settings_webdav_cors_os": "Obsidian 桌面版>=0.13.25 或 iOS>=1.1.1 或 Android>=1.2.1 支持跳过 CORS 设置。但您正在使用旧版,建议升级。",
@ -239,12 +245,25 @@
"settings_webdav_connect_succ": "很好!可以连接上 Webdav 服务器。",
"settings_webdav_connect_fail": "无法连接上 Webdav 服务器。(可能是地址/账号/密码/鉴权类型等错误。)",
"settings_webdav_connect_fail_withcors": "无法连接上 Webdav 服务器。(可能是地址/账号/密码/鉴权类型/CORS 等错误。)",
"settings_webdis": "Webdis 设置",
"settings_webdis_disclaimer1": "声明:此插件不是 Redis® Ltd 或 Redis® 软件或 Wedis 的官方产品。Redis 是 Redis Ltd 的注册商标。",
"settings_webdis_disclaimer2": "声明:您所输入的信息存储于本地。其它有害的或者出错的插件,是有可能读取到这些信息的。如果您发现了 Webdis 服务器有不符合预期的访问,请立刻修改用户名和密码。",
"settings_webdis_folder": "我们会在您的服务器上创建带有此前缀的 key 并在里面同步::{{remoteBaseDir}}。",
"settings_webdis_addr": "服务器地址",
"settings_webdis_addr_desc": "服务器地址",
"settings_webdis_user": "用户名",
"settings_webdis_user_desc": "用户名。注意:用户名和其它信息都会保存在本地。",
"settings_webdis_password": "密码",
"settings_webdis_password_desc": "密码。注意:密码和其它信息都会保存在本地。",
"settings_webdis_connect_succ": "很好!可以连接上 Webdis 服务器。",
"settings_webdis_connect_fail": "无法连接上 Webdis 服务器。(可能是地址/账号/密码/鉴权类型等错误。)",
"settings_chooseservice": "选择远程服务",
"settings_chooseservice_desc": "从这里开始设置。您想连接到哪一个服务S3、Dropbox、Webdav、OneDrive个人版",
"settings_chooseservice_desc": "从这里开始设置。您想连接到哪一个服务S3、Dropbox、Webdav、OneDrive个人版、Webdis",
"settings_chooseservice_s3": "S3 或兼容 S3 的服务",
"settings_chooseservice_dropbox": "Dropbox",
"settings_chooseservice_webdav": "Webdav",
"settings_chooseservice_onedrive": "OneDrive个人版",
"settings_chooseservice_webdis": "Webdis (an HTTP interface for Redis)",
"settings_adv": "进阶设置",
"settings_concurrency": "并行度",
"settings_concurrency_desc": "您希望同时最多有多少个文件被上传和下载?默认值是 5。如果您遇到了一些问题如访问频率限制您可以减少并行度。",
@ -261,7 +280,7 @@
"settings_conflictaction_keep_newer": "保留最后修改的版本(默认)",
"settings_conflictaction_keep_larger": "保留文件体积较大的版本",
"settings_cleanemptyfolder": "处理空文件夹",
"settings_cleanemptyfolder_desc": "同步算法主要是针对文件处理的,您要手动指定空文件夹如何处理。",
"settings_cleanemptyfolder_desc": "同步算法主要是针对文件处理的,您要手动指定空文件夹如何处理。",
"settings_cleanemptyfolder_skip": "跳过处理空文件夹(默认)",
"settings_cleanemptyfolder_clean_both": "删除本地和服务器的空文件夹",
"settings_protectmodifypercentage": "如果修改超过百分比则中止同步",
@ -269,6 +288,9 @@
"settings_protectmodifypercentage_000_desc": "0总是强制中止",
"settings_protectmodifypercentage_050_desc": "50默认值",
"settings_protectmodifypercentage_100_desc": "100去除此保护",
"settings_protectmodifypercentage_custom_desc": "自定义",
"settings_protectmodifypercentage_customfield": "如果修改超过自定义百分比则中止同步",
"settings_protectmodifypercentage_customfield_desc": "您需要输入 0~ 100的数字。小数也是可以的。",
"setting_syncdirection": "同步方向",
"setting_syncdirection_desc": "插件应该向哪里同步?注意每个选项都是只有修改了的文件(基于修改时间和大小判断)才会触发同步动作。",
"setting_syncdirection_bidirectional_desc": "双向同步(默认)",
@ -279,9 +301,12 @@
"settings_importexport": "导入导出部分设置",
"settings_export": "导出",
"settings_export_desc": "用 QR 码或 URI 导出设置信息。",
"settings_export_all_but_oauth2_button": "导出非 Oauth2 部分",
"settings_export_basic_and_advanced_button": "导出基本或进阶设置",
"settings_export_s3_button": "导出 S3 部分",
"settings_export_dropbox_button": "导出 Dropbox 部分",
"settings_export_onedrive_button": "导出 OneDrive 部分",
"settings_export_webdav_button": "导出 Webdav 部分",
"settings_export_webdis_button": "导出 Webdis 部分",
"settings_import": "导入",
"settings_import_desc": "粘贴之前导出的 URI 到这里然后点击“导入”。或,使用拍摄 app 或者扫描 QR 码的 app来扫描对应的 QR 码。",
"settings_import_button": "导入",
@ -299,6 +324,7 @@
"settings_viewconsolelog_desc": "电脑上输入“ctrl+shift+i”或“cmd+shift+i”来查看终端输出。手机上安装第三方插件 <a href='https://obsidian.md/plugins?search=Logstravaganza'>Logstravaganza</a> 来导出终端输出到一篇笔记上。",
"settings_syncplans": "导出同步计划",
"settings_syncplans_desc": "每次您启动同步,并在实际上传下载前,插件会生成同步计划。它可以使您知道每次同步发生了什么。点击按钮可以导出同步计划。",
"settings_syncplans_button_1_only_change": "导出最近 1 次(仅修改部分)",
"settings_syncplans_button_1": "导出最近 1 次",
"settings_syncplans_button_5": "导出最近 5 次",
"settings_syncplans_button_all": "导出所有",
@ -322,10 +348,13 @@
"settings_resetcache_desc": "(出于调试原因)重设本地缓存和数据库。您需要在重设之后重新载入此插件。本重设不会删除 s3密码……等设定。",
"settings_resetcache_button": "重设",
"settings_resetcache_notice": "本地同步缓存和数据库已被删除。请手动重新载入此插件。",
"syncalgov3_title": "Remotely Save 的同步算法有重大更新",
"syncalgov3_texts": "欢迎使用 Remotely Save\n从这个版本开始插件更新了同步算法\n<ul><li>更稳健的删除同步</li><li>引入冲突处理</li><li>避免上传元数据</li><li>修改删除保护</li><li>备份模式</li><li>新的加密方式</li><li>……</li></ul>\n敬请期待更多更新详细介绍请参阅<a href='https://github.com/remotely-save/remotely-save/tree/master/docs/sync_algorithm/v3/intro.md'>文档网站</a>。\n如果您同意使用新版本请阅读和勾选两个勾选框然后点击“同意”按钮开始使用插件吧\n如果您不同意请点击“不同意”按钮插件将自动停止运行unload。\n此外请考虑<a href='https://github.com/remotely-save/remotely-save'>访问 GitHub 页面然后点赞 ⭐</a>!您的支持对我十分重要!谢谢!",
"syncalgov3_checkbox_manual_backup": "我将会首先手动备份我的库Vault。",
"syncalgov3_checkbox_requiremultidevupdate": "我理解,我需要在所有设备上都更新此插件使之正常运行。",
"syncalgov3_button_agree": "同意",
"syncalgov3_button_disagree": "不同意"
"syncalgov3_button_disagree": "不同意",
"menu_check_file_stat": "查看文件属性"
}

View File

@ -9,9 +9,9 @@
"syncrun_syncingribbon": "{{pluginName}}:正在由 {{triggerSource}} 觸發執行",
"syncrun_step0": "0/8 Remotely Save 在空跑dry run模式不會發生實際的檔案交換。",
"syncrun_step1": "1/8 Remotely Save 準備同步({{serviceType}}",
"syncrun_step2": "2/8 正在獲取遠端的元資料。",
"syncrun_step3": "3/8 正在檢查密碼正確與否。",
"syncrun_step2": "2/8 正在獲取遠端的元數據和檢查密碼。",
"syncrun_passworderr": "檢查密碼時候出錯。",
"syncrun_step3": "3/8 正在處理遠端的元數據。",
"syncrun_step4": "4/8 正在獲取本地的元資料。",
"syncrun_step5": "5/8 正在獲取本地上一次同步的元資料。",
"syncrun_step6": "6/8 正在生成同步計劃。",
@ -38,6 +38,7 @@
"protocol_onedrive_connect_unknown": "不知道如何處理此 callback{{params}}",
"command_startsync": "開始同步start sync",
"command_drynrun": "開始同步空跑模式start sync (dry run only)",
"command_exportsyncplans_1_only_change": "匯出同步計劃(最近 1 次僅修改部分export sync plans (latest 1) (change part)",
"command_exportsyncplans_1": "匯出同步計劃(最近 1 次export sync plans (latest 1)",
"command_exportsyncplans_5": "匯出同步計劃(最近 5 次export sync plans (latest 5)",
"command_exportsyncplans_all": "匯出同步計劃所有export sync plans (all)",
@ -52,11 +53,11 @@
"statusbar_time_lessminute": "一分鐘之內同步",
"statusbar_lastsync": "上一次同步於:{{time}}",
"statusbar_syncing": "正在同步",
"statusbar_failed": "上次同步失敗了",
"statusbar_now": "剛同步完",
"statusbar_lastsync_label": "上一次同步於:{{date}}",
"statusbar_lastsync_never": "沒觸發過同步",
"statusbar_lastsync_never_label": "沒觸發過同步",
"modal_password_title": "稍等一下,請閱讀下文:",
"modal_password_shortdesc": "如果密碼不是空的,那麼檔案會在上傳之前,在本地先用此密碼加密。\n如果密碼是空的那麼檔案會被非加密地上傳。",
"modal_password_attn1": "注意 1/5儲存庫Vault名字是不會加密的本外掛會在一些遠端儲存裡建立一個和庫名字有著同名的資料夾。",
@ -132,17 +133,13 @@
"settings_runoncestartup_1sec": "啟動後第 1 秒執行一次",
"settings_runoncestartup_10sec": "啟動後第 10 秒執行一次",
"settings_runoncestartup_30sec": "啟動後第 30 秒執行一次",
"settings_saverun": "儲存時同步(實驗性質)",
"settings_saverun_desc": "外掛如果檢查到當前檔案在最近一段時間有修改儲存過,則嘗試同步。請注意,同步是一個很重的操作,因此會影響到耗電量。(修改設定後可能需要過載外掛或重啟。)",
"settings_saverun_notset": "(不設定)",
"settings_saverun_1sec": "隔 1 秒檢查一次",
"settings_saverun_5sec": "隔 5 秒檢查一次",
"settings_saverun_10sec": "隔 10 秒檢查一次(推薦)",
"settings_saverun_1min": "隔 1 分鐘檢查一次",
"settings_synconsave": "儲存時同步(實驗性質)",
"settings_synconsave_desc": "外掛如果檢查到當前檔案在最近一段時間有修改儲存過,則嘗試同步。請注意,同步是一個很重的操作,因此會影響到耗電量。(修改設定後可能需要過載外掛或重啟。)",
"settings_synconsave_disable": "關閉(預設)",
"settings_synconsave_enable": "開啟",
"settings_skiplargefiles": "跳過大檔案",
"settings_skiplargefiles_desc": "跳過大於某一個閾值的檔案。這裡 1 MB = 10^6 bytes。",
"settings_skiplargefiles_notset": "(不設定)",
"settings_ignorepaths": "忽略的檔案或資料夾的正則表示式",
"settings_ignorepaths_desc": "忽略的檔案或資料夾的正則表示式。每行一條。路徑是相對於庫Vault根目錄的沒有前置 / 符號。",
"settings_enablestatusbar_info": "在狀態列顯示上一次成功的同步",
@ -152,7 +149,6 @@
"settings_resetstatusbar_time_desc": "重設上一次成功同步的時間記錄。",
"settings_resetstatusbar_button": "重設",
"settings_resetstatusbar_notice": "重設完畢!",
"settings_checkonnectivity": "檢查可否連線",
"settings_checkonnectivity_desc": "檢查可否連線。",
"settings_checkonnectivity_button": "檢查",
@ -185,6 +181,12 @@
"settings_s3_accuratemtime_desc": "讀取(已上傳的)準確的檔案修改時間,有助於同步演算法更加準確和穩定。但是它也會導致額外的 api 請求、時間、金錢花費。",
"settings_s3_urlstyle": "S3 URL style",
"settings_s3_urlstyle_desc": "是否對 S3 物件強制使用 path style URL例如使用 https://s3.amazonaws.com/*/ 而不是 https://*.s3.amazonaws.com/)。",
"settings_s3_reverse_proxy_no_sign_url": "S3 反向代理(不簽名)地址(實驗性質)",
"settings_s3_reverse_proxy_no_sign_url_desc": "不會參與到簽名的 S3 反向代理地址。如果您有一個反向代理,但是不想修改原始鑑權簽名,這裡就可以填寫。沒有 http(s):// 字首。如果您不知道這是什麼,留空即可。",
"settings_s3_generatefolderobject": "是否生成文件夾 Object",
"settings_s3_generatefolderobject_desc": "S3 不存在“真正”的文件夾。如果您設置了“生成”(或用了舊版本),那麼插件會上傳 0 字節的以“/”結尾的 Object 來代表文件夾。新版本插件會默認跳過生成這種文件夾 Object。",
"settings_s3_generatefolderobject_notgenerate": "不生成(默認)",
"settings_s3_generatefolderobject_generate": "生成",
"settings_s3_connect_succ": "很好!可以訪問到對應儲存桶。",
"settings_s3_connect_fail": "無法訪問到對應儲存桶。",
"settings_dropbox": "Dropbox 設定",
@ -218,6 +220,10 @@
"settings_onedrive_auth_button": "鑑權",
"settings_onedrive_connect_succ": "很好!我們可連線上 OneDrive",
"settings_onedrive_connect_fail": "我們未能連線上 OneDrive。",
"settings_onedrive_emptyfile": "空檔案處理",
"settings_onedrive_emptyfile_desc": "OneDrive 不允許上傳空檔案(即使官網也是不允許的)。那麼你想跳過空檔案還是返回錯誤?",
"settings_onedrive_emptyfile_skip": "跳過",
"settings_onedrive_emptyfile_error": "返回錯誤和中斷",
"settings_webdav": "Webdav 設定",
"settings_webdav_disclaimer1": "宣告:您所輸入的資訊儲存於本地。其它有害的或者出錯的外掛,是有可能讀取到這些資訊的。如果您發現了 Webdav 伺服器有不符合預期的訪問,請立刻修改使用者名稱和密碼。",
"settings_webdav_cors_os": "Obsidian 桌面版>=0.13.25 或 iOS>=1.1.1 或 Android>=1.1.1 支援跳過 CORS 設定。但您正在使用舊版,建議升級。",
@ -238,12 +244,25 @@
"settings_webdav_connect_succ": "很好!可以連線上 Webdav 伺服器。",
"settings_webdav_connect_fail": "無法連線上 Webdav 伺服器。(可能是地址/賬號/密碼/鑑權型別等錯誤。)",
"settings_webdav_connect_fail_withcors": "無法連線上 Webdav 伺服器。(可能是地址/賬號/密碼/鑑權型別/CORS 等錯誤。)",
"settings_webdis": "Webdis 設置",
"settings_webdis_disclaimer1": "聲明:此插件不是 Redis® Ltd 或 Redis® 軟件或 Wedis 的官方產品。Redis 是 Redis Ltd 的註冊商標。",
"settings_webdis_disclaimer2": "聲明:您所輸入的信息存儲於本地。其它有害的或者出錯的插件,是有可能讀取到這些信息的。如果您發現了 Webdis 服務器有不符合預期的訪問,請立刻修改用戶名和密碼。",
"settings_webdis_folder": "我們會在您的服務器上創建帶有此前綴的 key 並在裡面同步::{{remoteBaseDir}}。",
"settings_webdis_addr": "服務器地址",
"settings_webdis_addr_desc": "服務器地址",
"settings_webdis_user": "用戶名",
"settings_webdis_user_desc": "用戶名。注意:用戶名和其它信息都會保存在本地。",
"settings_webdis_password": "密碼",
"settings_webdis_password_desc": "密碼。注意:密碼和其它信息都會保存在本地。",
"settings_webdis_connect_succ": "很好!可以連接上 Webdis 服務器。",
"settings_webdis_connect_fail": "無法連接上 Webdis 服務器。(可能是地址/賬號/密碼/鑑權類型等錯誤。)",
"settings_chooseservice": "選擇遠端服務",
"settings_chooseservice_desc": "從這裡開始設定。您想連線到哪一個服務S3、Dropbox、Webdav、OneDrive個人版",
"settings_chooseservice_desc": "從這裡開始設定。您想連線到哪一個服務S3、Dropbox、Webdav、OneDrive個人版、Webdis",
"settings_chooseservice_s3": "S3 或相容 S3 的服務",
"settings_chooseservice_dropbox": "Dropbox",
"settings_chooseservice_webdav": "Webdav",
"settings_chooseservice_onedrive": "OneDrive個人版",
"settings_chooseservice_webdis": "Webdis (an HTTP interface for Redis®)",
"settings_adv": "進階設定",
"settings_concurrency": "並行度",
"settings_concurrency_desc": "您希望同時最多有多少個檔案被上傳和下載?預設值是 5。如果您遇到了一些問題如訪問頻率限制您可以減少並行度。",
@ -268,6 +287,9 @@
"settings_protectmodifypercentage_000_desc": "0總是強制中止",
"settings_protectmodifypercentage_050_desc": "50預設值",
"settings_protectmodifypercentage_100_desc": "100去除此保護",
"settings_protectmodifypercentage_custom_desc": "自定義",
"settings_protectmodifypercentage_customfield": "如果修改超過自定義百分比則中止同步",
"settings_protectmodifypercentage_customfield_desc": "您需要輸入 0~ 100的數字。小數也是可以的。",
"setting_syncdirection": "同步方向",
"setting_syncdirection_desc": "外掛應該向哪裡同步?注意每個選項都是隻有修改了的檔案(基於修改時間和大小判斷)才會觸發同步動作。",
"setting_syncdirection_bidirectional_desc": "雙向同步(預設)",
@ -278,9 +300,12 @@
"settings_importexport": "匯入匯出部分設定",
"settings_export": "匯出",
"settings_export_desc": "用 QR 碼或 URI 匯出設定資訊。",
"settings_export_all_but_oauth2_button": "匯出非 Oauth2 部分",
"settings_export_basic_and_advanced_button": "匯出基本或進階設定",
"settings_export_s3_button": "匯出 S3 部分",
"settings_export_dropbox_button": "匯出 Dropbox 部分",
"settings_export_onedrive_button": "匯出 OneDrive 部分",
"settings_export_webdav_button": "匯出 Webdav 部分",
"settings_export_webdis_button": "匯出 Webdis 部分",
"settings_import": "匯入",
"settings_import_desc": "貼上之前匯出的 URI 到這裡然後點選“匯入”。或,使用拍攝 app 或者掃描 QR 碼的 app來掃描對應的 QR 碼。",
"settings_import_button": "匯入",
@ -298,6 +323,7 @@
"settings_viewconsolelog_desc": "電腦上輸入“ctrl+shift+i”或“cmd+shift+i”來檢視終端輸出。手機上安裝第三方外掛 <a href='https://obsidian.md/plugins?search=Logstravaganza'>Logstravaganza</a> 來匯出終端輸出到一篇筆記上。",
"settings_syncplans": "匯出同步計劃",
"settings_syncplans_desc": "每次您啟動同步,並在實際上傳下載前,外掛會生成同步計劃。它可以使您知道每次同步發生了什麼。點選按鈕可以匯出同步計劃。",
"settings_syncplans_button_1_only_change": "匯出最近 1 次(僅修改部分)",
"settings_syncplans_button_1": "匯出最近 1 次",
"settings_syncplans_button_5": "匯出最近 5 次",
"settings_syncplans_button_all": "匯出所有",
@ -321,10 +347,13 @@
"settings_resetcache_desc": "(出於除錯原因)重設本地快取和資料庫。您需要在重設之後重新載入此外掛。本重設不會刪除 s3密碼……等設定。",
"settings_resetcache_button": "重設",
"settings_resetcache_notice": "本地同步快取和資料庫已被刪除。請手動重新載入此外掛。",
"syncalgov3_title": "Remotely Save 的同步演算法有重大更新",
"syncalgov3_texts": "歡迎使用 Remotely Save\n從這個版本開始外掛更新了同步演算法\n<ul><li>更穩健的刪除同步</li><li>引入衝突處理</li><li>避免上傳元資料</li><li>修改刪除保護</li><li>備份模式</li><li>新的加密方式</li><li>……</li></ul>\n敬請期待更多更新詳細介紹請參閱<a href='https://github.com/remotely-save/remotely-save/tree/master/docs/sync_algorithm/v3/intro.md'>文件網站</a>。\n如果您同意使用新版本請閱讀和勾選兩個勾選框然後點選“同意”按鈕開始使用外掛吧\n如果您不同意請點選“不同意”按鈕外掛將自動停止執行unload。\n此外請考慮<a href='https://github.com/remotely-save/remotely-save'>訪問 GitHub 頁面然後點贊 ⭐</a>!您的支援對我十分重要!謝謝!",
"syncalgov3_checkbox_manual_backup": "我將會首先手動備份我的庫Vault。",
"syncalgov3_checkbox_requiremultidevupdate": "我理解,我需要在所有裝置上都更新此外掛使之正常執行。",
"syncalgov3_button_agree": "同意",
"syncalgov3_button_disagree": "不同意"
"syncalgov3_button_disagree": "不同意",
"menu_check_file_stat": "檢視檔案屬性"
}

View File

@ -1,76 +0,0 @@
import { TFile, TFolder, type Vault } from "obsidian";
import type { Entity, MixedEntity } from "./baseTypes";
import { listFilesInObsFolder } from "./obsFolderLister";
import { Profiler } from "./profiler";
export const getLocalEntityList = async (
vault: Vault,
syncConfigDir: boolean,
configDir: string,
pluginID: string,
profiler: Profiler
) => {
profiler.addIndent();
profiler.insert("enter getLocalEntityList");
const local: Entity[] = [];
const localTAbstractFiles = vault.getAllLoadedFiles();
profiler.insert("finish getting getAllLoadedFiles");
for (const entry of localTAbstractFiles) {
let r = {} as Entity;
let key = entry.path;
if (entry.path === "/") {
// ignore
continue;
} else if (entry instanceof TFile) {
let mtimeLocal: number | undefined = entry.stat.mtime;
if (mtimeLocal <= 0) {
mtimeLocal = entry.stat.ctime;
}
if (mtimeLocal === 0) {
mtimeLocal = undefined;
}
if (mtimeLocal === undefined) {
throw Error(
`Your file has last modified time 0: ${key}, don't know how to deal with it`
);
}
r = {
key: entry.path, // local always unencrypted
keyRaw: entry.path,
mtimeCli: mtimeLocal,
mtimeSvr: mtimeLocal,
size: entry.stat.size, // local always unencrypted
sizeRaw: entry.stat.size,
};
} else if (entry instanceof TFolder) {
key = `${entry.path}/`;
r = {
key: key,
keyRaw: key,
size: 0,
sizeRaw: 0,
};
} else {
throw Error(`unexpected ${entry}`);
}
local.push(r);
}
profiler.insert("finish transforming getAllLoadedFiles");
if (syncConfigDir) {
profiler.insert("into syncConfigDir");
const syncFiles = await listFilesInObsFolder(configDir, vault, pluginID);
for (const f of syncFiles) {
local.push(f);
}
profiler.insert("finish syncConfigDir");
}
profiler.insert("finish getLocalEntityList");
profiler.removeIndent();
return local;
};

View File

@ -1,14 +1,14 @@
import localforage from "localforage";
import { extendPrototype } from "localforage-getitems";
extendPrototype(localforage);
import { extendPrototype as ep1 } from "localforage-getitems";
import { extendPrototype as ep2 } from "localforage-removeitems";
ep1(localforage);
ep2(localforage);
export type LocalForage = typeof localforage;
import { nanoid } from "nanoid";
import { requireApiVersion, TAbstractFile, TFile, TFolder } from "obsidian";
import { API_VER_STAT_FOLDER } from "./baseTypes";
import type { Entity, MixedEntity, SUPPORTED_SERVICES_TYPE } from "./baseTypes";
import type { Entity, SUPPORTED_SERVICES_TYPE } from "./baseTypes";
import { unixTimeToStr } from "./misc";
import type { SyncPlanType } from "./sync";
import { statFix, toText, unixTimeToStr } from "./misc";
const DB_VERSION_NUMBER_IN_HISTORY = [20211114, 20220108, 20220326, 20240220];
export const DEFAULT_DB_VERSION_NUMBER: number = 20240220;
@ -20,6 +20,7 @@ export const DEFAULT_TBL_LOGGER_OUTPUT = "loggeroutput";
export const DEFAULT_TBL_SIMPLE_KV_FOR_MISC = "simplekvformisc";
export const DEFAULT_TBL_PREV_SYNC_RECORDS = "prevsyncrecords";
export const DEFAULT_TBL_PROFILER_RESULTS = "profilerresults";
export const DEFAULT_TBL_FILE_CONTENT_HISTORY = "filecontenthistory";
/**
* @deprecated
@ -62,6 +63,7 @@ export interface InternalDBs {
simpleKVForMiscTbl: LocalForage;
prevSyncRecordsTbl: LocalForage;
profilerResultsTbl: LocalForage;
fileContentHistoryTbl: LocalForage;
/**
* @deprecated
@ -221,6 +223,11 @@ export const prepareDBs = async (
name: DEFAULT_DB_NAME,
storeName: DEFAULT_TBL_SYNC_MAPPING,
}),
fileContentHistoryTbl: localforage.createInstance({
name: DEFAULT_DB_NAME,
storeName: DEFAULT_TBL_FILE_CONTENT_HISTORY,
}),
} as InternalDBs;
// try to get vaultRandomID firstly
@ -309,12 +316,15 @@ export const clearFileHistoryOfEverythingByVault = async (
db: InternalDBs,
vaultRandomID: string
) => {
const keys = await db.fileHistoryTbl.keys();
for (const key of keys) {
if (key.startsWith(`${vaultRandomID}\t`)) {
await db.fileHistoryTbl.removeItem(key);
}
}
const keys = (await db.fileHistoryTbl.keys()).filter((x) =>
x.startsWith(`${vaultRandomID}\t`)
);
await db.fileHistoryTbl.removeItems(keys);
// for (const key of keys) {
// if (key.startsWith(`${vaultRandomID}\t`)) {
// await db.fileHistoryTbl.removeItem(key);
// }
// }
};
/**
@ -341,12 +351,15 @@ export const clearAllSyncMetaMappingByVault = async (
db: InternalDBs,
vaultRandomID: string
) => {
const keys = await db.syncMappingTbl.keys();
for (const key of keys) {
if (key.startsWith(`${vaultRandomID}\t`)) {
await db.syncMappingTbl.removeItem(key);
}
}
const keys = (await db.syncMappingTbl.keys()).filter((x) =>
x.startsWith(`${vaultRandomID}\t`)
);
await db.syncMappingTbl.removeItems(keys);
// for (const key of keys) {
// if (key.startsWith(`${vaultRandomID}\t`)) {
// await db.syncMappingTbl.removeItem(key);
// }
// }
};
export const insertSyncPlanRecordByVault = async (
@ -402,7 +415,7 @@ export const clearExpiredSyncPlanRecords = async (db: InternalDBs) => {
const expiredTs = currTs - MILLISECONDS_OLD;
let records = (await db.syncPlansTbl.keys()).map((key) => {
const ts = parseInt(key.split("\t")[1]);
const ts = Number.parseInt(key.split("\t")[1]);
const expired = ts <= expiredTs;
return {
ts: ts,
@ -424,11 +437,12 @@ export const clearExpiredSyncPlanRecords = async (db: InternalDBs) => {
});
}
const ps = [] as Promise<void>[];
keysToRemove.forEach((element) => {
ps.push(db.syncPlansTbl.removeItem(element));
});
await Promise.all(ps);
// const ps = [] as Promise<void>[];
// keysToRemove.forEach((element) => {
// ps.push(db.syncPlansTbl.removeItem(element));
// });
// await Promise.all(ps);
await db.syncPlansTbl.removeItems(Array.from(keysToRemove));
};
export const getAllPrevSyncRecordsByVaultAndProfile = async (
@ -477,12 +491,10 @@ export const clearAllPrevSyncRecordByVault = async (
db: InternalDBs,
vaultRandomID: string
) => {
const keys = await db.prevSyncRecordsTbl.keys();
for (const key of keys) {
if (key.startsWith(`${vaultRandomID}\t`)) {
await db.prevSyncRecordsTbl.removeItem(key);
}
}
const keys = (await db.prevSyncRecordsTbl.keys()).filter((x) =>
x.startsWith(`${vaultRandomID}\t`)
);
await db.prevSyncRecordsTbl.removeItems(keys);
};
export const clearAllLoggerOutputRecords = async (db: InternalDBs) => {
@ -544,7 +556,7 @@ export const insertProfilerResultByVault = async (
// clear older one while writing
const records = (await db.profilerResultsTbl.keys())
.filter((x) => x.startsWith(`${vaultRandomID}\t`))
.map((x) => parseInt(x.split("\t")[1]));
.map((x) => Number.parseInt(x.split("\t")[1]));
records.sort((a, b) => -(a - b)); // descending
while (records.length > 5) {
const ts = records.pop()!;
@ -561,7 +573,7 @@ export const readAllProfilerResultsByVault = async (
if (key.startsWith(`${vaultRandomID}\t`)) {
records.push({
val: value as string,
ts: parseInt(key.split("\t")[1]),
ts: Number.parseInt(key.split("\t")[1]),
});
}
});
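The recurring change in this file replaces per-key removeItem loops with a single batched removeItems call provided by localforage-removeitems (see the extendPrototype imports at the top). A minimal sketch of the pattern, using the file's own LocalForage type (the function name is illustrative):

async function clearByVaultPrefix(tbl: LocalForage, vaultRandomID: string) {
  // keys are stored as `${vaultRandomID}\t...`, so filter by that prefix
  const keys = (await tbl.keys()).filter((k) =>
    k.startsWith(`${vaultRandomID}\t`)
  );
  await tbl.removeItems(keys); // one batched deletion instead of N round trips
}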

File diff suppressed because it is too large

View File

@ -1,9 +1,9 @@
import { Platform, Vault } from "obsidian";
import * as path from "path";
import type { Vault } from "obsidian";
import { base32, base64url } from "rfc4648";
import XRegExp from "xregexp";
import emojiRegex from "emoji-regex";
import { base32 } from "rfc4648";
import XRegExp from "xregexp";
declare global {
interface Window {
@ -18,11 +18,7 @@ declare global {
* @param underscore
* @returns
*/
export const isHiddenPath = (
item: string,
dot: boolean = true,
underscore: boolean = true
) => {
export const isHiddenPath = (item: string, dot = true, underscore = true) => {
if (!(dot || underscore)) {
throw Error("parameter error for isHiddenPath");
}
@ -50,7 +46,7 @@ export const isHiddenPath = (
* @param x string
* @returns string[] might be empty
*/
export const getFolderLevels = (x: string, addEndingSlash: boolean = false) => {
export const getFolderLevels = (x: string, addEndingSlash = false) => {
const res: string[] = [];
if (x === "" || x === "/") {
@ -58,7 +54,7 @@ export const getFolderLevels = (x: string, addEndingSlash: boolean = false) => {
}
const y1 = x.split("/");
let i = 0;
const i = 0;
for (let index = 0; index + 1 < y1.length; index++) {
let k = y1.slice(0, index + 1).join("/");
if (k === "" || k === "/") {
@ -119,7 +115,7 @@ export const base64ToArrayBuffer = (b64text: string) => {
};
export const copyArrayBuffer = (src: ArrayBuffer) => {
var dst = new ArrayBuffer(src.byteLength);
const dst = new ArrayBuffer(src.byteLength);
new Uint8Array(dst).set(new Uint8Array(src));
return dst;
};
@ -134,18 +130,14 @@ export const hexStringToTypedArray = (hex: string) => {
if (f === null) {
throw Error(`input ${hex} is not hex, no way to transform`);
}
return new Uint8Array(
f.map(function (h) {
return parseInt(h, 16);
})
);
return new Uint8Array(f.map((h) => Number.parseInt(h, 16)));
};
export const base64ToBase32 = (a: string) => {
return base32.stringify(Buffer.from(a, "base64"));
};
export const base64ToBase64url = (a: string, pad: boolean = false) => {
export const base64ToBase64url = (a: string, pad = false) => {
let b = a.replace(/\+/g, "-").replace(/\//g, "_");
if (!pad) {
b = b.replace(/=/g, "");
@ -190,7 +182,7 @@ export const hasEmojiInText = (a: string) => {
* @param toLower
* @returns
*/
export const headersToRecord = (h: Headers, toLower: boolean = true) => {
export const headersToRecord = (h: Headers, toLower = true) => {
const res: Record<string, string> = {};
h.forEach((v, k) => {
if (toLower) {
@ -240,11 +232,11 @@ export const getParentFolder = (a: string) => {
* @param delimiter
* @returns
*/
export const setToString = (a: Set<string>, delimiter: string = ",") => {
export const setToString = (a: Set<string>, delimiter = ",") => {
return [...a].join(delimiter);
};
export const extractSvgSub = (x: string, subEl: string = "rect") => {
export const extractSvgSub = (x: string, subEl = "rect") => {
const parser = new window.DOMParser();
const dom = parser.parseFromString(x, "image/svg+xml");
const svg = dom.querySelector("svg")!;
@ -261,10 +253,10 @@ export const extractSvgSub = (x: string, subEl: string = "rect") => {
export const getRandomIntInclusive = (min: number, max: number) => {
const randomBuffer = new Uint32Array(1);
window.crypto.getRandomValues(randomBuffer);
let randomNumber = randomBuffer[0] / (0xffffffff + 1);
min = Math.ceil(min);
max = Math.floor(max);
return Math.floor(randomNumber * (max - min + 1)) + min;
const randomNumber = randomBuffer[0] / (0xffffffff + 1);
const min2 = Math.ceil(min);
const max2 = Math.floor(max);
return Math.floor(randomNumber * (max2 - min2 + 1)) + min2;
};
/**
@ -349,11 +341,17 @@ export const checkHasSpecialCharForDir = (x: string) => {
return /[?/\\]/.test(x);
};
export const unixTimeToStr = (x: number | undefined | null) => {
export const unixTimeToStr = (x: number | undefined | null, hasMs = false) => {
if (x === undefined || x === null || Number.isNaN(x)) {
return undefined;
}
return window.moment(x).format() as string;
if (hasMs) {
// 1716712162574 => '2024-05-26T16:29:22.574+08:00'
return window.moment(x).toISOString(true);
} else {
// 1716712162574 => '2024-05-26T16:29:22+08:00'
return window.moment(x).format() as string;
}
};
/**
@ -397,9 +395,8 @@ export const toText = (x: any) => {
if (
x instanceof Error ||
(x &&
x.stack &&
x.message &&
(x?.stack &&
x?.message &&
typeof x.stack === "string" &&
typeof x.message === "string")
) {
@ -425,7 +422,7 @@ export const toText = (x: any) => {
export const statFix = async (vault: Vault, path: string) => {
const s = await vault.adapter.stat(path);
if (s === undefined || s === null) {
return s;
throw Error(`${path} doesn't exist, cannot run stat`);
}
if (s.ctime === undefined || s.ctime === null || Number.isNaN(s.ctime)) {
s.ctime = undefined as any; // force assignment
@ -446,7 +443,7 @@ export const isSpecialFolderNameToSkip = (
x: string,
more: string[] | undefined
) => {
let specialFolders = [
const specialFolders = [
".git",
".github",
".gitlab",
@ -595,6 +592,7 @@ export const changeMobileStatusBar = (
if (oldAppContainerObserver !== undefined) {
console.debug(`disconnect oldAppContainerObserver`);
oldAppContainerObserver.disconnect();
// biome-ignore lint/style/noParameterAssign: we want gc
oldAppContainerObserver = undefined;
}
statusbar.style.removeProperty("display");
@ -631,7 +629,6 @@ export const fixEntityListCasesInplace = (entities: { keyRaw: string }[]) => {
caseMapping[newKeyRaw.toLocaleLowerCase()] = newKeyRaw;
e.keyRaw = newKeyRaw;
// console.log(JSON.stringify(caseMapping,null,2));
continue;
} else {
throw Error(`${parentFolder} doesn't have cases record??`);
}
@ -642,7 +639,6 @@ export const fixEntityListCasesInplace = (entities: { keyRaw: string }[]) => {
.slice(-1)
.join("/")}`;
e.keyRaw = newKeyRaw;
continue;
} else {
throw Error(`${parentFolder} doesn't have cases record??`);
}
@ -651,3 +647,72 @@ export const fixEntityListCasesInplace = (entities: { keyRaw: string }[]) => {
return entities;
};
/**
* https://stackoverflow.com/questions/1248302/how-to-get-the-size-of-a-javascript-object
* @param object
* @returns bytes
*/
export const roughSizeOfObject = (object: any) => {
const objectList: any[] = [];
const stack = [object];
let bytes = 0;
while (stack.length) {
const value = stack.pop();
switch (typeof value) {
case "boolean":
bytes += 4;
break;
case "string":
bytes += value.length * 2;
break;
case "number":
bytes += 8;
break;
case "object":
if (!objectList.includes(value)) {
objectList.push(value);
for (const prop in value) {
if (value.hasOwnProperty(prop)) {
stack.push(value[prop]);
}
}
}
break;
}
}
return bytes;
};
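A quick worked example of the estimate above. Note that the traversal only pushes property values onto the stack, not key names, so the result is a deliberately rough lower bound:

roughSizeOfObject({ done: true, name: "ab", count: 3 });
// 4 (boolean) + 2 * 2 (two UTF-16 code units of "ab") + 8 (number) = 16 bytes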
export const splitFileSizeToChunkRanges = (
totalSize: number,
chunkSize: number
) => {
if (totalSize < 0) {
throw Error(`totalSize should not be negative`);
}
if (chunkSize <= 0) {
throw Error(`chunkSize should not be negative or zero`);
}
if (totalSize === 0) {
return [];
}
if (totalSize <= chunkSize) {
return [{ start: 0, end: totalSize - 1 }];
}
const res: { start: number; end: number }[] = [];
const blocksCount = Math.ceil((totalSize * 1.0) / chunkSize);
for (let i = 0; i < blocksCount; ++i) {
res.push({
start: i * chunkSize,
end: Math.min((i + 1) * chunkSize - 1, totalSize - 1),
});
}
return res;
};
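For instance, with the 5 MB chunk size mentioned in the WebDAV commits, a 12 MB upload splits into three inclusive byte ranges:

splitFileSizeToChunkRanges(12_000_000, 5_000_000);
// [ { start: 0, end: 4_999_999 },
//   { start: 5_000_000, end: 9_999_999 },
//   { start: 10_000_000, end: 11_999_999 } ]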

View File

@ -1,10 +1,10 @@
import type { Vault, Stat, ListedFiles } from "obsidian";
import type { Entity, MixedEntity } from "./baseTypes";
import type { ListedFiles, Vault } from "obsidian";
import type { Entity } from "./baseTypes";
import { Queue } from "@fyears/tsqueue";
import chunk from "lodash/chunk";
import flatten from "lodash/flatten";
import { statFix, isSpecialFolderNameToSkip } from "./misc";
import { isSpecialFolderNameToSkip, statFix } from "./misc";
const isPluginDirItself = (x: string, pluginId: string) => {
return (
@ -31,13 +31,6 @@ const isLikelyPluginSubFiles = (x: string) => {
return false;
};
export const isInsideObsFolder = (x: string, configDir: string) => {
if (!configDir.startsWith(".")) {
throw Error(`configDir should starts with . but we get ${configDir}`);
}
return x === configDir || x.startsWith(`${configDir}/`);
};
export const listFilesInObsFolder = async (
configDir: string,
vault: Vault,

View File

@ -1,27 +1,43 @@
import { unixTimeToStr } from "./misc";
import type { ProfilerConfig, SUPPORTED_SERVICES_TYPE } from "./baseTypes";
import { type InternalDBs, insertProfilerResultByVault } from "./localdb";
import { roughSizeOfObject, unixTimeToStr } from "./misc";
interface BreakPoint {
label: string;
fakeTimeMilli: number; // it's NOT a unix timestamp
indent: number;
size?: number;
}
export const DEFAULT_PROFILER_CONFIG: ProfilerConfig = {
enablePrinting: false,
recordSize: false,
};
export class Profiler {
startTime: number;
breakPoints: BreakPoint[];
indent: number;
constructor(label?: string) {
enablePrinting: boolean;
recordSize: boolean;
constructor(label?: string, enablePrinting?: boolean, recordSize?: boolean) {
this.breakPoints = [];
this.indent = 0;
this.startTime = 0;
this.enablePrinting = enablePrinting ?? false;
this.recordSize = recordSize ?? false;
if (label !== undefined) {
this.startTime = Date.now();
this.breakPoints.push({
const p = {
label: label,
fakeTimeMilli: performance.now(),
indent: this.indent,
});
};
this.breakPoints.push(p);
if (this.enablePrinting) {
console.debug(this.toString(-1));
}
}
}
@ -29,11 +45,36 @@ export class Profiler {
if (this.breakPoints.length === 0) {
this.startTime = Date.now();
}
this.breakPoints.push({
const p = {
label: label,
fakeTimeMilli: performance.now(),
indent: this.indent,
});
};
this.breakPoints.push(p);
if (this.enablePrinting) {
console.debug(this.toString(-1));
}
return this;
}
insertSize(label: string, obj: any) {
if (!this.recordSize) {
return;
}
if (this.breakPoints.length === 0) {
this.startTime = Date.now();
}
const p = {
label: label,
fakeTimeMilli: performance.now(),
indent: this.indent,
size: roughSizeOfObject(obj),
};
this.breakPoints.push(p);
if (this.enablePrinting) {
console.debug(this.toString(-1));
}
return this;
}
@ -55,7 +96,31 @@ export class Profiler {
return this;
}
toString() {
toString(idx?: number) {
if (idx !== undefined) {
let i = idx;
if (idx < 0) {
i = this.breakPoints.length + idx;
}
const label = this.breakPoints?.[i]["label"];
const indent = this.breakPoints?.[i]["indent"];
let millsec = 0;
if (i >= 1) {
millsec =
Math.round(
(this.breakPoints?.[i]["fakeTimeMilli"] -
this.breakPoints?.[i - 1]["fakeTimeMilli"]) *
10
) / 10.0;
}
let res = `${" ".repeat(indent)}[${label}]: ${millsec}ms`;
if (this.breakPoints[i].hasOwnProperty("size")) {
const size = this.breakPoints[i].size as number;
res += `, size=${size}`;
}
return res;
}
if (this.breakPoints.length === 0) {
return "nothing in profiler";
}
@ -65,18 +130,23 @@ export class Profiler {
if (i === 0) {
res += `\n[${this.breakPoints[i]["label"]}]: start`;
} else {
const label = this.breakPoints[i]["label"];
const indent = this.breakPoints[i]["indent"];
const millsec =
Math.round(
(this.breakPoints[i]["fakeTimeMilli"] -
this.breakPoints[i - 1]["fakeTimeMilli"]) *
10
) / 10.0;
res += `\n${" ".repeat(indent)}[${label}]: ${millsec}ms`;
res += `\n${this.toString(i)}`;
}
}
return res;
}
async save(
db: InternalDBs,
vaultRandomID: string,
remoteType: SUPPORTED_SERVICES_TYPE
) {
await insertProfilerResultByVault(
db,
this.toString(),
vaultRandomID,
remoteType
);
}
}
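A minimal usage sketch of the class above; the step labels are illustrative, and addIndent / removeIndent are the same methods used elsewhere in this diff:

const profiler = new Profiler("start sync", true /* enablePrinting */);
profiler.addIndent();
profiler.insert("fetch remote metadata");
profiler.insert("generate sync plan");
profiler.removeIndent();
console.log(profiler.toString()); // full report with per-step millisecond deltas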

View File

@ -1,320 +0,0 @@
import { Vault } from "obsidian";
import type {
Entity,
DropboxConfig,
OnedriveConfig,
S3Config,
SUPPORTED_SERVICES_TYPE,
WebdavConfig,
UploadedType,
} from "./baseTypes";
import * as dropbox from "./remoteForDropbox";
import * as onedrive from "./remoteForOnedrive";
import * as s3 from "./remoteForS3";
import * as webdav from "./remoteForWebdav";
import { Cipher } from "./encryptUnified";
import { Profiler } from "./profiler";
export class RemoteClient {
readonly serviceType: SUPPORTED_SERVICES_TYPE;
readonly s3Config?: S3Config;
readonly webdavClient?: webdav.WrappedWebdavClient;
readonly webdavConfig?: WebdavConfig;
readonly dropboxClient?: dropbox.WrappedDropboxClient;
readonly dropboxConfig?: DropboxConfig;
readonly onedriveClient?: onedrive.WrappedOnedriveClient;
readonly onedriveConfig?: OnedriveConfig;
constructor(
serviceType: SUPPORTED_SERVICES_TYPE,
s3Config?: S3Config,
webdavConfig?: WebdavConfig,
dropboxConfig?: DropboxConfig,
onedriveConfig?: OnedriveConfig,
vaultName?: string,
saveUpdatedConfigFunc?: () => Promise<any>,
profiler?: Profiler
) {
this.serviceType = serviceType;
// the client may modify the config inplace,
// so we use a ref not copy of config here
if (serviceType === "s3") {
this.s3Config = s3Config;
} else if (serviceType === "webdav") {
if (vaultName === undefined || saveUpdatedConfigFunc === undefined) {
throw Error(
"remember to provide vault name and callback while init webdav client"
);
}
const remoteBaseDir = webdavConfig!.remoteBaseDir || vaultName;
this.webdavConfig = webdavConfig;
this.webdavClient = webdav.getWebdavClient(
this.webdavConfig!,
remoteBaseDir,
saveUpdatedConfigFunc
);
} else if (serviceType === "dropbox") {
if (vaultName === undefined || saveUpdatedConfigFunc === undefined) {
throw Error(
"remember to provide vault name and callback while init dropbox client"
);
}
const remoteBaseDir = dropboxConfig!.remoteBaseDir || vaultName;
this.dropboxConfig = dropboxConfig;
this.dropboxClient = dropbox.getDropboxClient(
this.dropboxConfig!,
remoteBaseDir,
saveUpdatedConfigFunc
);
} else if (serviceType === "onedrive") {
if (vaultName === undefined || saveUpdatedConfigFunc === undefined) {
throw Error(
"remember to provide vault name and callback while init onedrive client"
);
}
const remoteBaseDir = onedriveConfig!.remoteBaseDir || vaultName;
this.onedriveConfig = onedriveConfig;
this.onedriveClient = onedrive.getOnedriveClient(
this.onedriveConfig!,
remoteBaseDir,
saveUpdatedConfigFunc
);
} else {
throw Error(`not supported service type ${this.serviceType}`);
}
}
getRemoteMeta = async (fileOrFolderPath: string) => {
if (this.serviceType === "s3") {
return await s3.getRemoteMeta(
s3.getS3Client(this.s3Config!),
this.s3Config!,
fileOrFolderPath
);
} else if (this.serviceType === "webdav") {
return await webdav.getRemoteMeta(this.webdavClient!, fileOrFolderPath);
} else if (this.serviceType === "dropbox") {
return await dropbox.getRemoteMeta(this.dropboxClient!, fileOrFolderPath);
} else if (this.serviceType === "onedrive") {
return await onedrive.getRemoteMeta(
this.onedriveClient!,
fileOrFolderPath
);
} else {
throw Error(`not supported service type ${this.serviceType}`);
}
};
uploadToRemote = async (
fileOrFolderPath: string,
vault: Vault | undefined,
isRecursively: boolean,
cipher: Cipher,
remoteEncryptedKey: string = "",
foldersCreatedBefore: Set<string> | undefined = undefined,
uploadRaw: boolean = false,
rawContent: string | ArrayBuffer = ""
): Promise<UploadedType> => {
if (this.serviceType === "s3") {
return await s3.uploadToRemote(
s3.getS3Client(this.s3Config!),
this.s3Config!,
fileOrFolderPath,
vault,
isRecursively,
cipher,
remoteEncryptedKey,
uploadRaw,
rawContent
);
} else if (this.serviceType === "webdav") {
return await webdav.uploadToRemote(
this.webdavClient!,
fileOrFolderPath,
vault,
isRecursively,
cipher,
remoteEncryptedKey,
uploadRaw,
rawContent
);
} else if (this.serviceType === "dropbox") {
return await dropbox.uploadToRemote(
this.dropboxClient!,
fileOrFolderPath,
vault,
isRecursively,
cipher,
remoteEncryptedKey,
foldersCreatedBefore,
uploadRaw,
rawContent
);
} else if (this.serviceType === "onedrive") {
return await onedrive.uploadToRemote(
this.onedriveClient!,
fileOrFolderPath,
vault,
isRecursively,
cipher,
remoteEncryptedKey,
foldersCreatedBefore,
uploadRaw,
rawContent
);
} else {
throw Error(`not supported service type ${this.serviceType}`);
}
};
listAllFromRemote = async (): Promise<Entity[]> => {
if (this.serviceType === "s3") {
return await s3.listAllFromRemote(
s3.getS3Client(this.s3Config!),
this.s3Config!
);
} else if (this.serviceType === "webdav") {
return await webdav.listAllFromRemote(this.webdavClient!);
} else if (this.serviceType === "dropbox") {
return await dropbox.listAllFromRemote(this.dropboxClient!);
} else if (this.serviceType === "onedrive") {
return await onedrive.listAllFromRemote(this.onedriveClient!);
} else {
throw Error(`not supported service type ${this.serviceType}`);
}
};
downloadFromRemote = async (
fileOrFolderPath: string,
vault: Vault,
mtime: number,
cipher: Cipher,
remoteEncryptedKey: string = "",
skipSaving: boolean = false
) => {
if (this.serviceType === "s3") {
return await s3.downloadFromRemote(
s3.getS3Client(this.s3Config!),
this.s3Config!,
fileOrFolderPath,
vault,
mtime,
cipher,
remoteEncryptedKey,
skipSaving
);
} else if (this.serviceType === "webdav") {
return await webdav.downloadFromRemote(
this.webdavClient!,
fileOrFolderPath,
vault,
mtime,
cipher,
remoteEncryptedKey,
skipSaving
);
} else if (this.serviceType === "dropbox") {
return await dropbox.downloadFromRemote(
this.dropboxClient!,
fileOrFolderPath,
vault,
mtime,
cipher,
remoteEncryptedKey,
skipSaving
);
} else if (this.serviceType === "onedrive") {
return await onedrive.downloadFromRemote(
this.onedriveClient!,
fileOrFolderPath,
vault,
mtime,
cipher,
remoteEncryptedKey,
skipSaving
);
} else {
throw Error(`not supported service type ${this.serviceType}`);
}
};
deleteFromRemote = async (
fileOrFolderPath: string,
cipher: Cipher,
remoteEncryptedKey: string = "",
synthesizedFolder: boolean = false
) => {
if (this.serviceType === "s3") {
return await s3.deleteFromRemote(
s3.getS3Client(this.s3Config!),
this.s3Config!,
fileOrFolderPath,
cipher,
remoteEncryptedKey,
synthesizedFolder
);
} else if (this.serviceType === "webdav") {
return await webdav.deleteFromRemote(
this.webdavClient!,
fileOrFolderPath,
cipher,
remoteEncryptedKey
);
} else if (this.serviceType === "dropbox") {
return await dropbox.deleteFromRemote(
this.dropboxClient!,
fileOrFolderPath,
cipher,
remoteEncryptedKey
);
} else if (this.serviceType === "onedrive") {
return await onedrive.deleteFromRemote(
this.onedriveClient!,
fileOrFolderPath,
cipher,
remoteEncryptedKey
);
} else {
throw Error(`not supported service type ${this.serviceType}`);
}
};
checkConnectivity = async (callbackFunc?: any) => {
if (this.serviceType === "s3") {
return await s3.checkConnectivity(
s3.getS3Client(this.s3Config!),
this.s3Config!,
callbackFunc
);
} else if (this.serviceType === "webdav") {
return await webdav.checkConnectivity(this.webdavClient!, callbackFunc);
} else if (this.serviceType === "dropbox") {
return await dropbox.checkConnectivity(this.dropboxClient!, callbackFunc);
} else if (this.serviceType === "onedrive") {
return await onedrive.checkConnectivity(
this.onedriveClient!,
callbackFunc
);
} else {
throw Error(`not supported service type ${this.serviceType}`);
}
};
getUser = async () => {
if (this.serviceType === "dropbox") {
return await dropbox.getUserDisplayName(this.dropboxClient!);
} else if (this.serviceType === "onedrive") {
return await onedrive.getUserDisplayName(this.onedriveClient!);
} else {
throw Error(`not supported service type ${this.serviceType}`);
}
};
revokeAuth = async () => {
if (this.serviceType === "dropbox") {
return await dropbox.revokeAuth(this.dropboxClient!);
} else {
throw Error(`not supported service type ${this.serviceType}`);
}
};
}

View File

@ -1,828 +0,0 @@
import type { _Object } from "@aws-sdk/client-s3";
import {
DeleteObjectCommand,
GetObjectCommand,
HeadBucketCommand,
HeadObjectCommand,
HeadObjectCommandOutput,
ListObjectsV2Command,
ListObjectsV2CommandInput,
PutObjectCommand,
S3Client,
} from "@aws-sdk/client-s3";
import { Upload } from "@aws-sdk/lib-storage";
import { HttpHandler, HttpRequest, HttpResponse } from "@smithy/protocol-http";
import {
FetchHttpHandler,
FetchHttpHandlerOptions,
} from "@smithy/fetch-http-handler";
// @ts-ignore
import { requestTimeout } from "@smithy/fetch-http-handler/dist-es/request-timeout";
import { buildQueryString } from "@smithy/querystring-builder";
import { HeaderBag, HttpHandlerOptions, Provider } from "@aws-sdk/types";
import { Buffer } from "buffer";
import * as mime from "mime-types";
import { Vault, requestUrl, RequestUrlParam, Platform } from "obsidian";
import { Readable } from "stream";
import * as path from "path";
import AggregateError from "aggregate-error";
import {
DEFAULT_CONTENT_TYPE,
Entity,
S3Config,
UploadedType,
VALID_REQURL,
} from "./baseTypes";
import {
arrayBufferToBuffer,
bufferToArrayBuffer,
mkdirpInVault,
} from "./misc";
export { S3Client } from "@aws-sdk/client-s3";
import PQueue from "p-queue";
import { Cipher } from "./encryptUnified";
////////////////////////////////////////////////////////////////////////////////
// special handler using Obsidian requestUrl
////////////////////////////////////////////////////////////////////////////////
/**
* This is close to origin implementation of FetchHttpHandler
* https://github.com/aws/aws-sdk-js-v3/blob/main/packages/fetch-http-handler/src/fetch-http-handler.ts
* that is released under Apache 2 License.
* But this uses Obsidian requestUrl instead.
*/
class ObsHttpHandler extends FetchHttpHandler {
requestTimeoutInMs: number | undefined;
constructor(options?: FetchHttpHandlerOptions) {
super(options);
this.requestTimeoutInMs =
options === undefined ? undefined : options.requestTimeout;
}
async handle(
request: HttpRequest,
{ abortSignal }: HttpHandlerOptions = {}
): Promise<{ response: HttpResponse }> {
if (abortSignal?.aborted) {
const abortError = new Error("Request aborted");
abortError.name = "AbortError";
return Promise.reject(abortError);
}
let path = request.path;
if (request.query) {
const queryString = buildQueryString(request.query);
if (queryString) {
path += `?${queryString}`;
}
}
const { port, method } = request;
const url = `${request.protocol}//${request.hostname}${
port ? `:${port}` : ""
}${path}`;
const body =
method === "GET" || method === "HEAD" ? undefined : request.body;
const transformedHeaders: Record<string, string> = {};
for (const key of Object.keys(request.headers)) {
const keyLower = key.toLowerCase();
if (keyLower === "host" || keyLower === "content-length") {
continue;
}
transformedHeaders[keyLower] = request.headers[key];
}
let contentType: string | undefined = undefined;
if (transformedHeaders["content-type"] !== undefined) {
contentType = transformedHeaders["content-type"];
}
let transformedBody: any = body;
if (ArrayBuffer.isView(body)) {
transformedBody = bufferToArrayBuffer(body);
}
const param: RequestUrlParam = {
body: transformedBody,
headers: transformedHeaders,
method: method,
url: url,
contentType: contentType,
};
const raceOfPromises = [
requestUrl(param).then((rsp) => {
const headers = rsp.headers;
const headersLower: Record<string, string> = {};
for (const key of Object.keys(headers)) {
headersLower[key.toLowerCase()] = headers[key];
}
const stream = new ReadableStream<Uint8Array>({
start(controller) {
controller.enqueue(new Uint8Array(rsp.arrayBuffer));
controller.close();
},
});
return {
response: new HttpResponse({
headers: headersLower,
statusCode: rsp.status,
body: stream,
}),
};
}),
requestTimeout(this.requestTimeoutInMs),
];
if (abortSignal) {
raceOfPromises.push(
new Promise<never>((resolve, reject) => {
abortSignal.onabort = () => {
const abortError = new Error("Request aborted");
abortError.name = "AbortError";
reject(abortError);
};
})
);
}
return Promise.race(raceOfPromises);
}
}
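// Note: this handler is wired into the S3Client below via the `requestHandler`
// option (see getS3Client), so all SDK traffic goes through Obsidian's
// requestUrl and can bypass CORS restrictions locally.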
////////////////////////////////////////////////////////////////////////////////
// other stuffs
////////////////////////////////////////////////////////////////////////////////
export const DEFAULT_S3_CONFIG: S3Config = {
s3Endpoint: "",
s3Region: "",
s3AccessKeyID: "",
s3SecretAccessKey: "",
s3BucketName: "",
bypassCorsLocally: true,
partsConcurrency: 20,
forcePathStyle: false,
remotePrefix: "",
useAccurateMTime: false, // it costs money (extra HEAD requests), disabled by default
};
export type S3ObjectType = _Object;
export const simpleTransRemotePrefix = (x: string | undefined) => {
if (x === undefined) {
return "";
}
let y = path.posix.normalize(x.trim());
if (y === undefined || y === "" || y === "/" || y === ".") {
return "";
}
if (y.startsWith("/")) {
y = y.slice(1);
}
if (!y.endsWith("/")) {
y = `${y}/`;
}
return y;
};
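// A quick sketch of the normalization above (hypothetical inputs):
//   simpleTransRemotePrefix("  /notes ") === "notes/"
//   simpleTransRemotePrefix("a/b")       === "a/b/"
//   simpleTransRemotePrefix("/")         === ""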
const getRemoteWithPrefixPath = (
fileOrFolderPath: string,
remotePrefix: string
) => {
let key = fileOrFolderPath;
if (fileOrFolderPath === "/" || fileOrFolderPath === "") {
// special
key = remotePrefix;
}
if (!fileOrFolderPath.startsWith("/")) {
key = `${remotePrefix}${fileOrFolderPath}`;
}
return key;
};
const getLocalNoPrefixPath = (
fileOrFolderPathWithRemotePrefix: string,
remotePrefix: string
) => {
if (
!(
fileOrFolderPathWithRemotePrefix === `${remotePrefix}` ||
fileOrFolderPathWithRemotePrefix.startsWith(`${remotePrefix}`)
)
) {
throw Error(
`"${fileOrFolderPathWithRemotePrefix}" doesn't starts with "${remotePrefix}"`
);
}
return fileOrFolderPathWithRemotePrefix.slice(`${remotePrefix}`.length);
};
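// Sketch of how the two helpers round-trip (assuming the prefix "vault1/"):
//   getRemoteWithPrefixPath("daily/a.md", "vault1/")     === "vault1/daily/a.md"
//   getLocalNoPrefixPath("vault1/daily/a.md", "vault1/") === "daily/a.md"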
const fromS3ObjectToEntity = (
x: S3ObjectType,
remotePrefix: string,
mtimeRecords: Record<string, number>,
ctimeRecords: Record<string, number>
) => {
// console.debug(`fromS3ObjectToEntity: ${x.Key!}, ${JSON.stringify(x,null,2)}`);
// S3 officially only supports seconds precision!!!!!
const mtimeSvr = Math.floor(x.LastModified!.valueOf() / 1000.0) * 1000;
let mtimeCli = mtimeSvr;
if (x.Key! in mtimeRecords) {
const m2 = mtimeRecords[x.Key!];
if (m2 !== 0) {
// to be compatible with RClone, we read and store the time in seconds in the new version!
if (m2 >= 1000000000000) {
// it's in milliseconds, uploaded by old versions of the plugin
mtimeCli = m2;
} else {
// it's in seconds, uploaded by plugin versions from March 24, 2024 onwards
mtimeCli = m2 * 1000;
}
}
}
const key = getLocalNoPrefixPath(x.Key!, remotePrefix);
const r: Entity = {
keyRaw: key,
mtimeSvr: mtimeSvr,
mtimeCli: mtimeCli,
sizeRaw: x.Size!,
etag: x.ETag,
synthesizedFolder: false,
};
return r;
};
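// Worked example of the mtime heuristic above: a recorded value of 1716540000
// is below 1000000000000, so it is treated as seconds and becomes
// 1716540000000 ms; a recorded 1716540000000 is already milliseconds and kept.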
const fromS3HeadObjectToEntity = (
fileOrFolderPathWithRemotePrefix: string,
x: HeadObjectCommandOutput,
remotePrefix: string
) => {
// console.debug(`fromS3HeadObjectToEntity: ${fileOrFolderPathWithRemotePrefix}: ${JSON.stringify(x,null,2)}`);
// S3 officially only supports seconds precision!!!!!
const mtimeSvr = Math.floor(x.LastModified!.valueOf() / 1000.0) * 1000;
let mtimeCli = mtimeSvr;
if (x.Metadata !== undefined) {
const m2 = Math.floor(
parseFloat(x.Metadata.mtime || x.Metadata.MTime || "0")
);
if (m2 !== 0) {
// to be compatible with RClone, we read and store the time in seconds in the new version!
if (m2 >= 1000000000000) {
// it's in milliseconds, uploaded by old versions of the plugin
mtimeCli = m2;
} else {
// it's in seconds, uploaded by plugin versions from March 24, 2024 onwards
mtimeCli = m2 * 1000;
}
}
}
// console.debug(
// `fromS3HeadObjectToEntity, fileOrFolderPathWithRemotePrefix=${fileOrFolderPathWithRemotePrefix}, remotePrefix=${remotePrefix}, x=${JSON.stringify(
// x
// )} `
// );
const key = getLocalNoPrefixPath(
fileOrFolderPathWithRemotePrefix,
remotePrefix
);
// console.debug(`fromS3HeadObjectToEntity, key=${key} after removing prefix`);
return {
keyRaw: key,
mtimeSvr: mtimeSvr,
mtimeCli: mtimeCli,
sizeRaw: x.ContentLength,
etag: x.ETag,
} as Entity;
};
export const getS3Client = (s3Config: S3Config) => {
let endpoint = s3Config.s3Endpoint;
if (!(endpoint.startsWith("http://") || endpoint.startsWith("https://"))) {
endpoint = `https://${endpoint}`;
}
let s3Client: S3Client;
if (VALID_REQURL && s3Config.bypassCorsLocally) {
s3Client = new S3Client({
region: s3Config.s3Region,
endpoint: endpoint,
forcePathStyle: s3Config.forcePathStyle,
credentials: {
accessKeyId: s3Config.s3AccessKeyID,
secretAccessKey: s3Config.s3SecretAccessKey,
},
requestHandler: new ObsHttpHandler(),
});
} else {
s3Client = new S3Client({
region: s3Config.s3Region,
endpoint: endpoint,
forcePathStyle: s3Config.forcePathStyle,
credentials: {
accessKeyId: s3Config.s3AccessKeyID,
secretAccessKey: s3Config.s3SecretAccessKey,
},
});
}
s3Client.middlewareStack.add(
(next, context) => (args) => {
(args.request as any).headers["cache-control"] = "no-cache";
return next(args);
},
{
step: "build",
}
);
return s3Client;
};
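// Minimal usage sketch (all values below are hypothetical placeholders):
// const client = getS3Client({
//   ...DEFAULT_S3_CONFIG,
//   s3Endpoint: "s3.us-east-1.amazonaws.com",
//   s3Region: "us-east-1",
//   s3AccessKeyID: "AKIA...",
//   s3SecretAccessKey: "...",
//   s3BucketName: "my-vault-bucket",
// });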
export const getRemoteMeta = async (
s3Client: S3Client,
s3Config: S3Config,
fileOrFolderPathWithRemotePrefix: string
) => {
if (
s3Config.remotePrefix !== undefined &&
s3Config.remotePrefix !== "" &&
!fileOrFolderPathWithRemotePrefix.startsWith(s3Config.remotePrefix)
) {
throw Error(`s3 getRemoteMeta should only accept a prefixed path`);
}
const res = await s3Client.send(
new HeadObjectCommand({
Bucket: s3Config.s3BucketName,
Key: fileOrFolderPathWithRemotePrefix,
})
);
return fromS3HeadObjectToEntity(
fileOrFolderPathWithRemotePrefix,
res,
s3Config.remotePrefix ?? ""
);
};
export const uploadToRemote = async (
s3Client: S3Client,
s3Config: S3Config,
fileOrFolderPath: string,
vault: Vault | undefined,
isRecursively: boolean,
cipher: Cipher,
remoteEncryptedKey: string = "",
uploadRaw: boolean = false,
rawContent: string | ArrayBuffer = "",
rawContentMTime: number = 0,
rawContentCTime: number = 0
): Promise<UploadedType> => {
console.debug(`uploading ${fileOrFolderPath}`);
let uploadFile = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
if (remoteEncryptedKey === undefined || remoteEncryptedKey === "") {
throw Error(
`uploadToRemote(s3): you have a password but remoteEncryptedKey is empty!`
);
}
uploadFile = remoteEncryptedKey;
}
uploadFile = getRemoteWithPrefixPath(uploadFile, s3Config.remotePrefix ?? "");
// console.debug(`actual uploadFile=${uploadFile}`);
const isFolder = fileOrFolderPath.endsWith("/");
if (isFolder && isRecursively) {
throw Error("upload function doesn't implement recursive function yet!");
} else if (isFolder && !isRecursively) {
if (uploadRaw) {
throw Error(`you specified uploadRaw, but also provided a folder key!`);
}
// folder
let mtime = 0;
let ctime = 0;
const s = await vault?.adapter?.stat(fileOrFolderPath);
if (s !== undefined && s !== null) {
mtime = s.mtime;
ctime = s.ctime;
}
const contentType = DEFAULT_CONTENT_TYPE;
await s3Client.send(
new PutObjectCommand({
Bucket: s3Config.s3BucketName,
Key: uploadFile,
Body: "",
ContentType: contentType,
Metadata: {
MTime: `${mtime / 1000.0}`,
CTime: `${ctime / 1000.0}`,
},
})
);
const res = await getRemoteMeta(s3Client, s3Config, uploadFile);
return {
entity: res,
mtimeCli: mtime,
};
} else {
// file
// we ignore isRecursively parameter here
let contentType = DEFAULT_CONTENT_TYPE;
if (cipher.isPasswordEmpty()) {
contentType =
mime.contentType(
mime.lookup(fileOrFolderPath) || DEFAULT_CONTENT_TYPE
) || DEFAULT_CONTENT_TYPE;
}
let localContent = undefined;
let mtime = 0;
let ctime = 0;
if (uploadRaw) {
if (typeof rawContent === "string") {
localContent = new TextEncoder().encode(rawContent).buffer;
} else {
localContent = rawContent;
}
mtime = rawContentMTime;
ctime = rawContentCTime;
} else {
if (vault === undefined) {
throw new Error(
`the vault variable was not passed, but we need to read ${fileOrFolderPath} for S3`
);
}
localContent = await vault.adapter.readBinary(fileOrFolderPath);
const s = await vault.adapter.stat(fileOrFolderPath);
if (s !== undefined && s !== null) {
mtime = s.mtime;
ctime = s.ctime;
}
}
let remoteContent = localContent;
if (!cipher.isPasswordEmpty()) {
remoteContent = await cipher.encryptContent(localContent);
}
const bytesIn5MB = 5242880;
const body = new Uint8Array(remoteContent);
const upload = new Upload({
client: s3Client,
queueSize: s3Config.partsConcurrency, // concurrency
partSize: bytesIn5MB, // minimal 5MB by default
leavePartsOnError: false,
params: {
Bucket: s3Config.s3BucketName,
Key: uploadFile,
Body: body,
ContentType: contentType,
Metadata: {
MTime: `${mtime / 1000.0}`,
CTime: `${ctime / 1000.0}`,
},
},
});
upload.on("httpUploadProgress", (progress) => {
// console.info(progress);
});
await upload.done();
const res = await getRemoteMeta(s3Client, s3Config, uploadFile);
// console.debug(
// `uploaded ${uploadFile} with res=${JSON.stringify(res, null, 2)}`
// );
return {
entity: res,
mtimeCli: mtime,
};
}
};
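// Hedged usage sketch: uploading a single note (vault and cipher come from
// the plugin at runtime; the path is illustrative only):
// const { entity } = await uploadToRemote(
//   client, s3Config, "daily/2024-06-01.md", vault, false, cipher
// );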
const listFromRemoteRaw = async (
s3Client: S3Client,
s3Config: S3Config,
prefixOfRawKeys?: string
) => {
const confCmd = {
Bucket: s3Config.s3BucketName,
} as ListObjectsV2CommandInput;
if (prefixOfRawKeys !== undefined && prefixOfRawKeys !== "") {
confCmd.Prefix = prefixOfRawKeys;
}
const contents = [] as _Object[];
const mtimeRecords: Record<string, number> = {};
const ctimeRecords: Record<string, number> = {};
const queueHead = new PQueue({
concurrency: s3Config.partsConcurrency,
autoStart: true,
});
queueHead.on("error", (error) => {
queueHead.pause();
queueHead.clear();
throw error;
});
let isTruncated = true;
do {
const rsp = await s3Client.send(new ListObjectsV2Command(confCmd));
if (rsp.$metadata.httpStatusCode !== 200) {
throw Error("some thing bad while listing remote!");
}
if (rsp.Contents === undefined) {
break;
}
contents.push(...rsp.Contents);
if (s3Config.useAccurateMTime) {
// head requests of all objects, love it
for (const content of rsp.Contents) {
queueHead.add(async () => {
const rspHead = await s3Client.send(
new HeadObjectCommand({
Bucket: s3Config.s3BucketName,
Key: content.Key,
})
);
if (rspHead.$metadata.httpStatusCode !== 200) {
throw Error("some thing bad while heading single object!");
}
if (rspHead.Metadata === undefined) {
// pass
} else {
mtimeRecords[content.Key!] = Math.floor(
parseFloat(
rspHead.Metadata.mtime || rspHead.Metadata.MTime || "0"
)
);
ctimeRecords[content.Key!] = Math.floor(
parseFloat(
rspHead.Metadata.ctime || rspHead.Metadata.CTime || "0"
)
);
}
});
}
}
isTruncated = rsp.IsTruncated ?? false;
confCmd.ContinuationToken = rsp.NextContinuationToken;
if (
isTruncated &&
(confCmd.ContinuationToken === undefined ||
confCmd.ContinuationToken === "")
) {
throw Error("isTruncated is true but no continuationToken provided");
}
} while (isTruncated);
// wait for any head requests
await queueHead.onIdle();
// assemble the final response:
// in the end, we need to transform the response list
// back into a list shaped like the local contents
return contents.map((x) =>
fromS3ObjectToEntity(
x,
s3Config.remotePrefix ?? "",
mtimeRecords,
ctimeRecords
)
);
};
export const listAllFromRemote = async (
s3Client: S3Client,
s3Config: S3Config
) => {
const res = (
await listFromRemoteRaw(s3Client, s3Config, s3Config.remotePrefix)
).filter((x) => x.keyRaw !== "" && x.keyRaw !== "/");
return res;
};
/**
* The Body of resp of aws GetObject has mix types
* and we want to get ArrayBuffer here.
* See https://github.com/aws/aws-sdk-js-v3/issues/1877
* @param b The Body of GetObject
* @returns Promise<ArrayBuffer>
*/
const getObjectBodyToArrayBuffer = async (
b: Readable | ReadableStream | Blob | undefined
) => {
if (b === undefined) {
throw Error(`ObjectBody is undefined and we don't know how to deal with it`);
}
if (b instanceof Readable) {
return (await new Promise((resolve, reject) => {
const chunks: Uint8Array[] = [];
b.on("data", (chunk) => chunks.push(chunk));
b.on("error", reject);
b.on("end", () => resolve(bufferToArrayBuffer(Buffer.concat(chunks))));
})) as ArrayBuffer;
} else if (b instanceof ReadableStream) {
return await new Response(b, {}).arrayBuffer();
} else if (b instanceof Blob) {
return await b.arrayBuffer();
} else {
throw TypeError(`The type of ${b} is not one of the supported types`);
}
};
const downloadFromRemoteRaw = async (
s3Client: S3Client,
s3Config: S3Config,
fileOrFolderPathWithRemotePrefix: string
) => {
if (
s3Config.remotePrefix !== undefined &&
s3Config.remotePrefix !== "" &&
!fileOrFolderPathWithRemotePrefix.startsWith(s3Config.remotePrefix)
) {
throw Error(`downloadFromRemoteRaw should only accept a prefixed path`);
}
const data = await s3Client.send(
new GetObjectCommand({
Bucket: s3Config.s3BucketName,
Key: fileOrFolderPathWithRemotePrefix,
})
);
const bodyContents = await getObjectBodyToArrayBuffer(data.Body);
return bodyContents;
};
export const downloadFromRemote = async (
s3Client: S3Client,
s3Config: S3Config,
fileOrFolderPath: string,
vault: Vault,
mtime: number,
cipher: Cipher,
remoteEncryptedKey: string,
skipSaving: boolean = false
) => {
const isFolder = fileOrFolderPath.endsWith("/");
if (!skipSaving) {
await mkdirpInVault(fileOrFolderPath, vault);
}
// the target is always a local file;
// we may need to decrypt the downloaded content
if (isFolder) {
// mkdirp locally is enough
// do nothing here
return new ArrayBuffer(0);
} else {
let downloadFile = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
downloadFile = remoteEncryptedKey;
}
downloadFile = getRemoteWithPrefixPath(
downloadFile,
s3Config.remotePrefix ?? ""
);
const remoteContent = await downloadFromRemoteRaw(
s3Client,
s3Config,
downloadFile
);
let localContent = remoteContent;
if (!cipher.isPasswordEmpty()) {
localContent = await cipher.decryptContent(remoteContent);
}
if (!skipSaving) {
await vault.adapter.writeBinary(fileOrFolderPath, localContent, {
mtime: mtime,
});
}
return localContent;
}
};
/**
* This function deals with file normally and "folder" recursively.
* @param s3Client
* @param s3Config
* @param fileOrFolderPath
* @returns
*/
export const deleteFromRemote = async (
s3Client: S3Client,
s3Config: S3Config,
fileOrFolderPath: string,
cipher: Cipher,
remoteEncryptedKey: string = "",
synthesizedFolder: boolean = false
) => {
if (fileOrFolderPath === "/") {
return;
}
if (synthesizedFolder) {
return;
}
let remoteFileName = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
remoteFileName = remoteEncryptedKey;
}
remoteFileName = getRemoteWithPrefixPath(
remoteFileName,
s3Config.remotePrefix ?? ""
);
await s3Client.send(
new DeleteObjectCommand({
Bucket: s3Config.s3BucketName,
Key: remoteFileName,
})
);
if (fileOrFolderPath.endsWith("/") && cipher.isPasswordEmpty()) {
const x = await listFromRemoteRaw(s3Client, s3Config, remoteFileName);
// use for...of (not forEach with an async callback, which is fire-and-forget)
// and re-apply the remote prefix, since Entity.keyRaw has it stripped
for (const element of x) {
await s3Client.send(
new DeleteObjectCommand({
Bucket: s3Config.s3BucketName,
Key: getRemoteWithPrefixPath(element.keyRaw, s3Config.remotePrefix ?? ""),
})
);
}
} else if (fileOrFolderPath.endsWith("/") && !cipher.isPasswordEmpty()) {
// TODO
} else {
// pass
}
};
/**
* Check the config of S3 by heading bucket
* https://stackoverflow.com/questions/50842835
*
* Updated on 20240102:
* Users do not always have permission to head the bucket,
* so we list objects instead...
*
* @param s3Client
* @param s3Config
* @returns
*/
export const checkConnectivity = async (
s3Client: S3Client,
s3Config: S3Config,
callbackFunc?: any
) => {
try {
// TODO: no universal way for now; just check this during the connectivity check
if (Platform.isIosApp && s3Config.s3Endpoint.startsWith("http://")) {
throw Error(
`Your S3 endpoint must be https, not http, because of an iOS restriction.`
);
}
// const results = await s3Client.send(
// new HeadBucketCommand({ Bucket: s3Config.s3BucketName })
// );
// very simplified version of listing objects
const confCmd = {
Bucket: s3Config.s3BucketName,
} as ListObjectsV2CommandInput;
const results = await s3Client.send(new ListObjectsV2Command(confCmd));
if (
results === undefined ||
results.$metadata === undefined ||
results.$metadata.httpStatusCode === undefined
) {
const err = "results or $metadata or httStatusCode is undefined";
console.debug(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
}
return false;
}
return results.$metadata.httpStatusCode === 200;
} catch (err: any) {
console.debug(err);
if (callbackFunc !== undefined) {
if (s3Config.s3Endpoint.contains(s3Config.s3BucketName)) {
const err2 = new AggregateError([
err,
new Error(
"Maybe you've included the bucket name inside the endpoint setting. Please remove the bucket name and try again."
),
]);
callbackFunc(err2);
} else {
callbackFunc(err);
}
}
return false;
}
};


@ -1,608 +0,0 @@
import { Buffer } from "buffer";
import { Platform, Vault, requestUrl } from "obsidian";
import { Queue } from "@fyears/tsqueue";
import chunk from "lodash/chunk";
import flatten from "lodash/flatten";
import cloneDeep from "lodash/cloneDeep";
import { getReasonPhrase } from "http-status-codes";
import { Entity, UploadedType, VALID_REQURL, WebdavConfig } from "./baseTypes";
import { bufferToArrayBuffer, getPathFolder, mkdirpInVault } from "./misc";
import { Cipher } from "./encryptUnified";
import type {
FileStat,
WebDAVClient,
RequestOptionsWithState,
// Response,
// ResponseDataDetailed,
} from "webdav";
/**
* https://stackoverflow.com/questions/32850898/how-to-check-if-a-string-has-any-non-iso-8859-1-characters-with-javascript
* @param str
* @returns true if all chars are ISO-8859-1
*/
function onlyAscii(str: string) {
return !/[^\u0000-\u00ff]/g.test(str);
}
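// e.g. onlyAscii("plain text") === true, but onlyAscii("苹果") === false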
/**
* https://stackoverflow.com/questions/12539574/
* @param obj
* @returns
*/
function objKeyToLower(obj: Record<string, string>) {
return Object.fromEntries(
Object.entries(obj).map(([k, v]) => [k.toLowerCase(), v])
);
}
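// e.g. objKeyToLower({ "Content-Type": "text/md" }) gives { "content-type": "text/md" }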
// @ts-ignore
import { getPatcher } from "webdav/dist/web/index.js";
if (VALID_REQURL) {
getPatcher().patch(
"request",
async (options: RequestOptionsWithState): Promise<Response> => {
const transformedHeaders = objKeyToLower({ ...options.headers });
delete transformedHeaders["host"];
delete transformedHeaders["content-length"];
const reqContentType =
transformedHeaders["accept"] ?? transformedHeaders["content-type"];
const retractedHeaders = { ...transformedHeaders };
if (retractedHeaders.hasOwnProperty("authorization")) {
retractedHeaders["authorization"] = "<retracted>";
}
console.debug(`before request:`);
console.debug(`url: ${options.url}`);
console.debug(`method: ${options.method}`);
console.debug(`headers: ${JSON.stringify(retractedHeaders, null, 2)}`);
console.debug(`reqContentType: ${reqContentType}`);
let r = await requestUrl({
url: options.url,
method: options.method,
body: options.data as string | ArrayBuffer,
headers: transformedHeaders,
contentType: reqContentType,
throw: false,
});
if (
r.status === 401 &&
Platform.isIosApp &&
!options.url.endsWith("/") &&
!options.url.endsWith(".md") &&
options.method.toUpperCase() === "PROPFIND"
) {
// don't ask me why:
// some webdav servers have mysterious behaviours;
// if a folder path lacks a trailing slash and doesn't exist, they return 401 instead of 404.
// here is a dirty hack that works
console.debug(`got 401, retrying the request url with a trailing slash`);
r = await requestUrl({
url: `${options.url}/`,
method: options.method,
body: options.data as string | ArrayBuffer,
headers: transformedHeaders,
contentType: reqContentType,
throw: false,
});
}
console.debug(`after request:`);
const rspHeaders = objKeyToLower({ ...r.headers });
console.debug(`rspHeaders: ${JSON.stringify(rspHeaders, null, 2)}`);
for (let key in rspHeaders) {
if (rspHeaders.hasOwnProperty(key)) {
// avoid the error:
// Failed to read the 'headers' property from 'ResponseInit': String contains non ISO-8859-1 code point.
// const possibleNonAscii = [
// "Content-Disposition",
// "X-Accel-Redirect",
// "X-Outfilename",
// "X-Sendfile"
// ];
// for (const p of possibleNonAscii) {
// if (key === p || key === p.toLowerCase()) {
// rspHeaders[key] = encodeURIComponent(rspHeaders[key]);
// }
// }
if (!onlyAscii(rspHeaders[key])) {
console.debug(`rspHeaders[key] needs encode: ${key}`);
rspHeaders[key] = encodeURIComponent(rspHeaders[key]);
}
}
}
let r2: Response | undefined = undefined;
const statusText = getReasonPhrase(r.status);
console.debug(`statusText: ${statusText}`);
if ([101, 103, 204, 205, 304].includes(r.status)) {
// A null body status is a status that is 101, 103, 204, 205, or 304.
// https://fetch.spec.whatwg.org/#statuses
// fix this: Failed to construct 'Response': Response with null body status cannot have body
r2 = new Response(null, {
status: r.status,
statusText: statusText,
headers: rspHeaders,
});
} else {
r2 = new Response(r.arrayBuffer, {
status: r.status,
statusText: statusText,
headers: rspHeaders,
});
}
return r2;
}
);
}
// @ts-ignore
import { AuthType, BufferLike, createClient } from "webdav/dist/web/index.js";
export type { WebDAVClient } from "webdav";
export const DEFAULT_WEBDAV_CONFIG = {
address: "",
username: "",
password: "",
authType: "basic",
manualRecursive: true,
depth: "manual_1",
remoteBaseDir: "",
} as WebdavConfig;
const getWebdavPath = (fileOrFolderPath: string, remoteBaseDir: string) => {
let key = fileOrFolderPath;
if (fileOrFolderPath === "/" || fileOrFolderPath === "") {
// special
key = `/${remoteBaseDir}/`;
} else if (fileOrFolderPath.startsWith("/")) {
console.warn(
`why does the path ${fileOrFolderPath} start with '/'? we just go on anyway.`
);
key = `/${remoteBaseDir}${fileOrFolderPath}`;
} else {
key = `/${remoteBaseDir}/${fileOrFolderPath}`;
}
return key;
};
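// Path mapping examples (assuming remoteBaseDir === "MyVault"):
//   getWebdavPath("", "MyVault")           === "/MyVault/"
//   getWebdavPath("notes/a.md", "MyVault") === "/MyVault/notes/a.md"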
const getNormPath = (fileOrFolderPath: string, remoteBaseDir: string) => {
if (
!(
fileOrFolderPath === `/${remoteBaseDir}` ||
fileOrFolderPath.startsWith(`/${remoteBaseDir}/`)
)
) {
throw Error(
`"${fileOrFolderPath}" doesn't starts with "/${remoteBaseDir}/"`
);
}
// if (fileOrFolderPath.startsWith("/")) {
// return fileOrFolderPath.slice(1);
// }
return fileOrFolderPath.slice(`/${remoteBaseDir}/`.length);
};
const fromWebdavItemToEntity = (x: FileStat, remoteBaseDir: string) => {
let key = getNormPath(x.filename, remoteBaseDir);
if (x.type === "directory" && !key.endsWith("/")) {
key = `${key}/`;
}
const mtimeSvr = Date.parse(x.lastmod).valueOf();
return {
keyRaw: key,
mtimeSvr: mtimeSvr,
mtimeCli: mtimeSvr, // no universal way to set mtime in webdav
sizeRaw: x.size,
etag: x.etag,
} as Entity;
};
export class WrappedWebdavClient {
webdavConfig: WebdavConfig;
remoteBaseDir: string;
client!: WebDAVClient;
vaultFolderExists: boolean;
saveUpdatedConfigFunc: () => Promise<any>;
constructor(
webdavConfig: WebdavConfig,
remoteBaseDir: string,
saveUpdatedConfigFunc: () => Promise<any>
) {
this.webdavConfig = cloneDeep(webdavConfig);
this.webdavConfig.address = encodeURI(this.webdavConfig.address);
this.remoteBaseDir = remoteBaseDir;
this.vaultFolderExists = false;
this.saveUpdatedConfigFunc = saveUpdatedConfigFunc;
}
init = async () => {
// init the client if it has not been initialized yet
if (this.client !== undefined) {
return;
}
if (Platform.isIosApp && !this.webdavConfig.address.startsWith("https")) {
throw Error(
`Your webdav address must be https, not http, because of an iOS restriction.`
);
}
const headers = {
"Cache-Control": "no-cache",
};
if (
this.webdavConfig.username !== "" &&
this.webdavConfig.password !== ""
) {
this.client = createClient(this.webdavConfig.address, {
username: this.webdavConfig.username,
password: this.webdavConfig.password,
headers: headers,
authType:
this.webdavConfig.authType === "digest"
? AuthType.Digest
: AuthType.Password,
});
} else {
console.info("no password");
this.client = createClient(this.webdavConfig.address, {
headers: headers,
});
}
// check vault folder
if (this.vaultFolderExists) {
// pass
} else {
const res = await this.client.exists(`/${this.remoteBaseDir}/`);
if (res) {
// console.info("remote vault folder exits!");
this.vaultFolderExists = true;
} else {
console.info("remote vault folder not exists, creating");
await this.client.createDirectory(`/${this.remoteBaseDir}/`);
console.info("remote vault folder created!");
this.vaultFolderExists = true;
}
}
// adjust depth parameter
if (
this.webdavConfig.depth === "auto" ||
this.webdavConfig.depth === "auto_1" ||
this.webdavConfig.depth === "auto_infinity" ||
this.webdavConfig.depth === "auto_unknown"
) {
this.webdavConfig.depth = "manual_1";
this.webdavConfig.manualRecursive = true;
if (this.saveUpdatedConfigFunc !== undefined) {
await this.saveUpdatedConfigFunc();
console.info(
`webdav depth="auto_???" is changed to ${this.webdavConfig.depth}`
);
}
}
};
}
export const getWebdavClient = (
webdavConfig: WebdavConfig,
remoteBaseDir: string,
saveUpdatedConfigFunc: () => Promise<any>
) => {
return new WrappedWebdavClient(
webdavConfig,
remoteBaseDir,
saveUpdatedConfigFunc
);
};
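// Minimal construction sketch (hypothetical values; the callback would
// normally persist the plugin settings):
// const client = getWebdavClient(
//   {
//     ...DEFAULT_WEBDAV_CONFIG,
//     address: "https://dav.example.com/remote.php/webdav",
//     username: "user",
//     password: "pass",
//   },
//   "MyVault",
//   async () => {}
// );
// await client.init(); // creates the remote vault folder if missing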
/**
*
* @param client
* @param remotePath It should be prefixed already
* @returns
*/
export const getRemoteMeta = async (
client: WrappedWebdavClient,
remotePath: string
) => {
await client.init();
console.debug(`getRemoteMeta remotePath = ${remotePath}`);
const res = (await client.client.stat(remotePath, {
details: false,
})) as FileStat;
console.debug(`getRemoteMeta res=${JSON.stringify(res)}`);
return fromWebdavItemToEntity(res, client.remoteBaseDir);
};
export const uploadToRemote = async (
client: WrappedWebdavClient,
fileOrFolderPath: string,
vault: Vault | undefined,
isRecursively: boolean,
cipher: Cipher,
remoteEncryptedKey: string = "",
uploadRaw: boolean = false,
rawContent: string | ArrayBuffer = ""
): Promise<UploadedType> => {
await client.init();
let uploadFile = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
if (remoteEncryptedKey === undefined || remoteEncryptedKey === "") {
throw Error(
`uploadToRemote(webdav): you have a password but remoteEncryptedKey is empty!`
);
}
uploadFile = remoteEncryptedKey;
}
uploadFile = getWebdavPath(uploadFile, client.remoteBaseDir);
const isFolder = fileOrFolderPath.endsWith("/");
if (isFolder && isRecursively) {
throw Error("upload function doesn't implement recursive function yet!");
} else if (isFolder && !isRecursively) {
if (uploadRaw) {
throw Error(`you specified uploadRaw, but also provided a folder key!`);
}
// folder
if (cipher.isPasswordEmpty() || cipher.isFolderAware()) {
// if not encrypted, || encrypted isFolderAware, mkdir a remote folder
await client.client.createDirectory(uploadFile, {
recursive: true,
});
const res = await getRemoteMeta(client, uploadFile);
return {
entity: res,
};
} else {
// if encrypted && !isFolderAware(),
// upload a fake file with the encrypted file name
await client.client.putFileContents(uploadFile, "", {
overwrite: true,
onUploadProgress: (progress: any) => {
// console.info(`Uploaded ${progress.loaded} bytes of ${progress.total}`);
},
});
return {
entity: await getRemoteMeta(client, uploadFile),
};
}
} else {
// file
// we ignore isRecursively parameter here
let localContent: ArrayBuffer | undefined = undefined;
let mtimeCli: number | undefined = undefined;
if (uploadRaw) {
if (typeof rawContent === "string") {
localContent = new TextEncoder().encode(rawContent).buffer;
} else {
localContent = rawContent;
}
} else {
if (vault === undefined) {
throw new Error(
`the vault variable was not passed, but we need to read ${fileOrFolderPath} for webdav`
);
}
localContent = await vault.adapter.readBinary(fileOrFolderPath);
mtimeCli = (await vault.adapter.stat(fileOrFolderPath))?.mtime;
}
let remoteContent = localContent;
if (!cipher.isPasswordEmpty()) {
remoteContent = await cipher.encryptContent(localContent);
}
// updated 20220326: the algorithm guarantees this
// // we need to create folders before uploading
// const dir = getPathFolder(uploadFile);
// if (dir !== "/" && dir !== "") {
// await client.client.createDirectory(dir, { recursive: true });
// }
await client.client.putFileContents(uploadFile, remoteContent, {
overwrite: true,
onUploadProgress: (progress: any) => {
console.info(`Uploaded ${progress.loaded} bytes of ${progress.total}`);
},
});
return {
entity: await getRemoteMeta(client, uploadFile),
mtimeCli: mtimeCli,
};
}
};
export const listAllFromRemote = async (client: WrappedWebdavClient) => {
await client.init();
let contents = [] as FileStat[];
if (
client.webdavConfig.depth === "auto" ||
client.webdavConfig.depth === "auto_unknown" ||
client.webdavConfig.depth === "auto_1" ||
client.webdavConfig.depth === "auto_infinity" /* don't trust auto now */ ||
client.webdavConfig.depth === "manual_1"
) {
// the remote doesn't support infinite-depth propfind,
// so we need to do a BFS here
const q = new Queue([`/${client.remoteBaseDir}`]);
const CHUNK_SIZE = 10;
while (q.length > 0) {
const itemsToFetch: string[] = [];
while (q.length > 0) {
itemsToFetch.push(q.pop()!);
}
const itemsToFetchChunks = chunk(itemsToFetch, CHUNK_SIZE);
// console.debug(itemsToFetchChunks);
const subContents = [] as FileStat[];
for (const singleChunk of itemsToFetchChunks) {
const r = singleChunk.map((x) => {
return client.client.getDirectoryContents(x, {
deep: false,
details: false /* no need for verbose details here */,
// TODO: to support .obsidian,
// we need to load all files including dot files;
// any way to reduce the resource usage?
// glob: "/**" /* avoid dot files by using glob */,
}) as Promise<FileStat[]>;
});
const r2 = flatten(await Promise.all(r));
subContents.push(...r2);
}
for (let i = 0; i < subContents.length; ++i) {
const f = subContents[i];
contents.push(f);
if (f.type === "directory") {
q.push(f.filename);
}
}
}
} else {
// the remote supports infinite-depth propfind
contents = (await client.client.getDirectoryContents(
`/${client.remoteBaseDir}`,
{
deep: true,
details: false /* no need for verbose details here */,
// TODO: to support .obsidian,
// we need to load all files including dot files;
// any way to reduce the resource usage?
// glob: "/**" /* avoid dot files by using glob */,
}
)) as FileStat[];
}
return contents.map((x) => fromWebdavItemToEntity(x, client.remoteBaseDir));
};
const downloadFromRemoteRaw = async (
client: WrappedWebdavClient,
remotePath: string
) => {
await client.init();
// console.info(`getWebdavPath=${remotePath}`);
const buff = (await client.client.getFileContents(remotePath)) as BufferLike;
if (buff instanceof ArrayBuffer) {
return buff;
} else if (buff instanceof Buffer) {
return bufferToArrayBuffer(buff);
}
throw Error(`unexpected file content result with type ${typeof buff}`);
};
export const downloadFromRemote = async (
client: WrappedWebdavClient,
fileOrFolderPath: string,
vault: Vault,
mtime: number,
cipher: Cipher,
remoteEncryptedKey: string = "",
skipSaving: boolean = false
) => {
await client.init();
const isFolder = fileOrFolderPath.endsWith("/");
if (!skipSaving) {
await mkdirpInVault(fileOrFolderPath, vault);
}
// the file is always local file
// we need to encrypt it
if (isFolder) {
// mkdirp locally is enough
// do nothing here
return new ArrayBuffer(0);
} else {
let downloadFile = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
downloadFile = remoteEncryptedKey;
}
downloadFile = getWebdavPath(downloadFile, client.remoteBaseDir);
// console.info(`downloadFile=${downloadFile}`);
const remoteContent = await downloadFromRemoteRaw(client, downloadFile);
let localContent = remoteContent;
if (!cipher.isPasswordEmpty()) {
localContent = await cipher.decryptContent(remoteContent);
}
if (!skipSaving) {
await vault.adapter.writeBinary(fileOrFolderPath, localContent, {
mtime: mtime,
});
}
return localContent;
}
};
export const deleteFromRemote = async (
client: WrappedWebdavClient,
fileOrFolderPath: string,
cipher: Cipher,
remoteEncryptedKey: string = ""
) => {
if (fileOrFolderPath === "/") {
return;
}
let remoteFileName = fileOrFolderPath;
if (!cipher.isPasswordEmpty()) {
remoteFileName = remoteEncryptedKey;
}
remoteFileName = getWebdavPath(remoteFileName, client.remoteBaseDir);
await client.init();
try {
await client.client.deleteFile(remoteFileName);
// console.info(`delete ${remoteFileName} succeeded`);
} catch (err) {
console.error("some error while deleting");
console.error(err);
}
};
export const checkConnectivity = async (
client: WrappedWebdavClient,
callbackFunc?: any
) => {
if (
!(
client.webdavConfig.address.startsWith("http://") ||
client.webdavConfig.address.startsWith("https://")
)
) {
const err = "Error: the url should start with http(s):// but it does not!";
console.error(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
}
return false;
}
try {
await client.init();
const results = await getRemoteMeta(client, `/${client.remoteBaseDir}/`);
if (results === undefined) {
const err = "results is undefined";
console.error(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
}
return false;
}
return true;
} catch (err) {
console.error(err);
if (callbackFunc !== undefined) {
callbackFunc(err);
}
return false;
}
};

File diff suppressed because it is too large

File diff suppressed because it is too large


@ -1,6 +1,6 @@
import { App, Modal, Notice, PluginSettingTab, Setting } from "obsidian";
import type RemotelySavePlugin from "./main"; // unavoidable
import { type App, Modal } from "obsidian";
import type { TransItemType } from "./i18n";
import type RemotelySavePlugin from "./main"; // unavoidable
import { stringToFragment } from "./misc";
@ -17,7 +17,7 @@ export class SyncAlgoV3Modal extends Modal {
this.requireUpdateAllDev = false;
}
onOpen() {
let { contentEl } = this;
const { contentEl } = this;
const t = (x: TransItemType, vars?: any) => {
return this.plugin.i18n.t(x, vars);
};
@ -112,14 +112,14 @@ export class SyncAlgoV3Modal extends Modal {
}
onClose() {
let { contentEl } = this;
const { contentEl } = this;
contentEl.empty();
if (this.agree) {
console.info("agree to use the new algorithm");
this.plugin.saveAgreeToUseNewSyncAlgorithm();
this.plugin.enableAutoSyncIfSet();
this.plugin.enableInitSyncIfSet();
this.plugin.enableSyncOnSaveIfSet();
this.plugin.toggleSyncOnSaveIfSet();
} else {
console.info("do not agree to use the new algorithm");
this.plugin.unload();


@ -17,6 +17,10 @@
padding-top: 18px;
}
.settings-percentage-custom-hide {
display: none;
}
.s3-disclaimer {
font-weight: bold;
}
@ -61,6 +65,32 @@
display: none;
}
.webdis-disclaimer {
font-weight: bold;
}
.webdis-hide {
display: none;
}
.googledrive-disclaimer {
font-weight: bold;
}
.googledrive-hide {
display: none;
}
.googledrive-allow-to-use-hide {
display: none;
}
.googledrive-auth-button-hide {
display: none;
}
.googledrive-revoke-auth-button-hide {
display: none;
}
.qrcode-img {
width: 350px;
height: 350px;
@ -74,3 +104,23 @@
color: red;
font-weight: bolder;
}
.setting-need-wrapping .setting-item-control {
/* flex-wrap: wrap; */
display: grid;
}
.pro-disclaimer {
font-weight: bold;
}
.pro-hide {
display: none;
}
.pro-auth-button-hide {
display: none;
}
.pro-revoke-auth-button-hide {
display: none;
}


@ -1,12 +1,8 @@
import * as chai from "chai";
import chaiAsPromised from "chai-as-promised";
import { strict as assert } from "assert";
import { RemotelySavePluginSettings } from "../src/baseTypes";
import type { RemotelySavePluginSettings } from "../src/baseTypes";
import { messyConfigToNormal, normalConfigToMessy } from "../src/configPersist";
chai.use(chaiAsPromised);
const expect = chai.expect;
const DEFAULT_SETTINGS: RemotelySavePluginSettings = {
s3: {
s3AccessKeyID: "acc",
@ -20,6 +16,12 @@ const DEFAULT_SETTINGS: RemotelySavePluginSettings = {
onedrive: {
username: "test 🍎 emoji",
} as any,
webdis: {
address: "addr",
} as any,
googledrive: {
refreshToken: "xxx",
} as any,
password: "password",
serviceType: "s3",
currLogLevel: "info",
@ -32,6 +34,6 @@ describe("Config Persist tests", () => {
const k = DEFAULT_SETTINGS;
const k2 = normalConfigToMessy(k);
const k3 = messyConfigToNormal(k2);
expect(k3).to.deep.equal(k);
assert.deepEqual(k3, k);
});
});


@ -1,5 +1,4 @@
import * as chai from "chai";
import chaiAsPromised from "chai-as-promised";
import { strict as assert } from "assert";
import * as fs from "fs";
import * as path from "path";
import {
@ -13,11 +12,8 @@ import {
} from "../src/encryptOpenSSL";
import { base64ToBase64url, bufferToArrayBuffer } from "../src/misc";
chai.use(chaiAsPromised);
const expect = chai.expect;
describe("Encryption OpenSSL tests", () => {
beforeEach(function () {
beforeEach(() => {
global.window = {
crypto: require("crypto").webcrypto,
} as any;
@ -26,7 +22,7 @@ describe("Encryption OpenSSL tests", () => {
it("should encrypt string", async () => {
const k = "dkjdhkfhdkjgsdklxxd";
const password = "hey";
expect(await encryptStringToBase32(k, password)).to.not.equal(k);
assert.notEqual(await encryptStringToBase32(k, password), k);
});
it("should encrypt string and return different results each time", async () => {
@ -34,7 +30,7 @@ describe("Encryption OpenSSL tests", () => {
const password = "hey";
const res1 = await encryptStringToBase32(k, password);
const res2 = await encryptStringToBase32(k, password);
expect(res1).to.not.equal(res2);
assert.notEqual(res1, res2);
});
it("should raise error using different password", async () => {
@ -42,7 +38,7 @@ describe("Encryption OpenSSL tests", () => {
const password = "hey";
const password2 = "hey2";
const enc = await encryptStringToBase32(k, password);
await expect(decryptBase32ToString(enc, password2)).to.be.rejected;
await assert.rejects(decryptBase32ToString(enc, password2));
});
it("should encrypt and decrypt string and get the same result returned", async () => {
@ -52,7 +48,7 @@ describe("Encryption OpenSSL tests", () => {
// console.log(enc);
const dec = await decryptBase32ToString(enc, password);
// console.log(dec);
expect(dec).equal(k);
assert.equal(dec, k);
});
it("should encrypt text file and get the same result as openssl", async () => {
@ -78,7 +74,7 @@ describe("Encryption OpenSSL tests", () => {
// we output base32, so we need some transformation
const opensslBase64urlRes = base64ToBase64url(opensslBase64Res);
expect(enc).equal(opensslBase64urlRes);
assert.equal(enc, opensslBase64urlRes);
});
it("should encrypt binary file and get the same result as openssl", async () => {
@ -102,7 +98,7 @@ describe("Encryption OpenSSL tests", () => {
// openssl enc -p -aes-256-cbc -S 8302F586FAB491EC -pbkdf2 -iter 20000 -pass pass:somepassword -in mona_lisa/1374px-Mona_Lisa,_by_Leonardo_da_Vinci,_from_C2RMF_retouched.jpg -out mona_lisa/1374px-Mona_Lisa,_by_Leonardo_da_Vinci,_from_C2RMF_retouched.jpg.enc
expect(Buffer.from(enc).equals(Buffer.from(opensslArrBuf))).to.be.true;
assert.ok(Buffer.from(enc).equals(Buffer.from(opensslArrBuf)));
});
it("should encrypt binary file not deterministically", async () => {
@ -116,7 +112,7 @@ describe("Encryption OpenSSL tests", () => {
const res1 = await encryptArrayBuffer(fileArrBuf, password);
const res2 = await encryptArrayBuffer(fileArrBuf, password);
expect(Buffer.from(res1).equals(Buffer.from(res2))).to.be.false;
assert.ok(!Buffer.from(res1).equals(Buffer.from(res2)));
});
it("should decrypt binary file and get the same result as openssl", async () => {
@ -132,36 +128,36 @@ describe("Encryption OpenSSL tests", () => {
await fs.readFileSync(path.join(testFolder, testFileName))
);
expect(Buffer.from(dec).equals(Buffer.from(opensslArrBuf))).to.be.true;
assert.deepEqual(Buffer.from(dec), Buffer.from(opensslArrBuf));
});
it("should get size from origin to encrypted correctly", () => {
expect(() => getSizeFromOrigToEnc(-1)).to.throw();
expect(() => getSizeFromOrigToEnc(0.5)).to.throw();
expect(getSizeFromOrigToEnc(0)).equals(32);
expect(getSizeFromOrigToEnc(15)).equals(32);
expect(getSizeFromOrigToEnc(16)).equals(48);
expect(getSizeFromOrigToEnc(31)).equals(48);
expect(getSizeFromOrigToEnc(32)).equals(64);
expect(getSizeFromOrigToEnc(14787203)).equals(14787232);
assert.throws(() => getSizeFromOrigToEnc(-1));
assert.throws(() => getSizeFromOrigToEnc(0.5));
assert.equal(getSizeFromOrigToEnc(0), 32);
assert.equal(getSizeFromOrigToEnc(15), 32);
assert.equal(getSizeFromOrigToEnc(16), 48);
assert.equal(getSizeFromOrigToEnc(31), 48);
assert.equal(getSizeFromOrigToEnc(32), 64);
assert.equal(getSizeFromOrigToEnc(14787203), 14787232);
});
it("should get size from encrypted to origin correctly", () => {
expect(() => getSizeFromEncToOrig(-1)).to.throw();
expect(() => getSizeFromEncToOrig(30)).to.throw();
assert.throws(() => getSizeFromEncToOrig(-1));
assert.throws(() => getSizeFromEncToOrig(30));
expect(getSizeFromEncToOrig(32)).to.deep.equal({
assert.deepEqual(getSizeFromEncToOrig(32), {
minSize: 0,
maxSize: 15,
});
expect(getSizeFromEncToOrig(48)).to.deep.equal({
assert.deepEqual(getSizeFromEncToOrig(48), {
minSize: 16,
maxSize: 31,
});
expect(() => getSizeFromEncToOrig(14787231)).to.throw();
assert.throws(() => getSizeFromEncToOrig(14787231));
let { minSize, maxSize } = getSizeFromEncToOrig(14787232);
expect(minSize <= 14787203 && 14787203 <= maxSize).to.be.true;
const { minSize, maxSize } = getSizeFromEncToOrig(14787232);
assert.ok(minSize <= 14787203 && 14787203 <= maxSize);
});
});

tests/fsWebdis.test.ts Normal file

@ -0,0 +1,12 @@
import { strict as assert } from "assert";
import { getOrigPath } from "../src/fsWebdis";
describe("Webdis operations tests", () => {
it("should get orig keys correctly", () => {
const input = "rs:fs:v1:库名字/something dev.md:meta";
const output = getOrigPath(input, "库名字");
const expected = "something dev.md";
assert.equal(output, expected);
});
});


@ -1,14 +1,9 @@
import * as chai from "chai";
import chaiAsPromised from "chai-as-promised";
import { strict as assert } from "assert";
import {
type MetadataOnRemote,
isEqualMetadataOnRemote,
MetadataOnRemote,
} from "../src/metadataOnRemote";
chai.use(chaiAsPromised);
const expect = chai.expect;
describe("Metadata operations tests", () => {
it("should compare objects deeply", async () => {
const a: MetadataOnRemote = {
@ -24,7 +19,7 @@ describe("Metadata operations tests", () => {
],
};
expect(isEqualMetadataOnRemote(a, b));
assert.ok(isEqualMetadataOnRemote(a, b));
});
it("should find diff", async () => {
@ -41,7 +36,7 @@ describe("Metadata operations tests", () => {
],
};
expect(!isEqualMetadataOnRemote(a, b));
assert.ok(!isEqualMetadataOnRemote(a, b));
});
it("should treat undefined correctly", async () => {
@ -53,22 +48,22 @@ describe("Metadata operations tests", () => {
],
};
expect(!isEqualMetadataOnRemote(a, b));
assert.ok(!isEqualMetadataOnRemote(a, b));
b = { deletions: [] };
expect(isEqualMetadataOnRemote(a, b));
assert.ok(isEqualMetadataOnRemote(a, b));
b = { deletions: undefined };
expect(isEqualMetadataOnRemote(a, b));
assert.ok(isEqualMetadataOnRemote(a, b));
b = undefined;
expect(isEqualMetadataOnRemote(a, b));
assert.ok(isEqualMetadataOnRemote(a, b));
});
it("should ignore generated at fields", async () => {
const a: MetadataOnRemote = {
deletions: [
{ key: "xxxx", actionWhen: 1 },
{ key: "xxx", actionWhen: 1 },
{ key: "yyy", actionWhen: 2 },
],
generatedWhen: 1,
@ -81,6 +76,6 @@ describe("Metadata operations tests", () => {
generatedWhen: 2,
};
expect(isEqualMetadataOnRemote(a, b));
assert.ok(isEqualMetadataOnRemote(a, b));
});
});


@ -1,139 +1,139 @@
import { expect } from "chai";
import { strict as assert } from "assert";
import { JSDOM } from "jsdom";
import * as misc from "../src/misc";
describe("Misc: hidden file", () => {
it("should find hidden file correctly", () => {
let item = "";
expect(misc.isHiddenPath(item)).to.be.false;
assert.ok(!misc.isHiddenPath(item));
item = ".";
expect(misc.isHiddenPath(item)).to.be.false;
assert.ok(!misc.isHiddenPath(item));
item = "..";
expect(misc.isHiddenPath(item)).to.be.false;
assert.ok(!misc.isHiddenPath(item));
item = "/x/y/z/../././../a/b/c";
expect(misc.isHiddenPath(item)).to.be.false;
assert.ok(!misc.isHiddenPath(item));
item = ".hidden";
expect(misc.isHiddenPath(item)).to.be.true;
assert.ok(misc.isHiddenPath(item));
item = "_hidden_loose";
expect(misc.isHiddenPath(item)).to.be.true;
expect(misc.isHiddenPath(item, true, false)).to.be.false;
assert.ok(misc.isHiddenPath(item));
assert.ok(!misc.isHiddenPath(item, true, false));
item = "/sdd/_hidden_loose";
expect(misc.isHiddenPath(item)).to.be.true;
assert.ok(misc.isHiddenPath(item));
item = "what/../_hidden_loose/what/what/what";
expect(misc.isHiddenPath(item)).to.be.true;
assert.ok(misc.isHiddenPath(item));
item = "what/../_hidden_loose/what/what/what";
expect(misc.isHiddenPath(item, true, false)).to.be.false;
assert.ok(!misc.isHiddenPath(item, true, false));
item = "what/../_hidden_loose/../.hidden/what/what/what";
expect(misc.isHiddenPath(item, true, false)).to.be.true;
assert.ok(misc.isHiddenPath(item, true, false));
item = "what/../_hidden_loose/../.hidden/what/what/what";
expect(misc.isHiddenPath(item, false, true)).to.be.false;
assert.ok(!misc.isHiddenPath(item, false, true));
item = "what/_hidden_loose/what/what/what";
expect(misc.isHiddenPath(item, false, true)).to.be.true;
expect(misc.isHiddenPath(item, true, false)).to.be.false;
assert.ok(misc.isHiddenPath(item, false, true));
assert.ok(!misc.isHiddenPath(item, true, false));
item = "what/.hidden/what/what/what";
expect(misc.isHiddenPath(item, false, true)).to.be.false;
expect(misc.isHiddenPath(item, true, false)).to.be.true;
assert.ok(!misc.isHiddenPath(item, false, true));
assert.ok(misc.isHiddenPath(item, true, false));
});
});
describe("Misc: get folder levels", () => {
it("should ignore empty path", () => {
const item = "";
expect(misc.getFolderLevels(item)).to.be.empty;
assert.equal(misc.getFolderLevels(item).length, 0);
});
it("should ignore single file", () => {
const item = "xxx";
expect(misc.getFolderLevels(item)).to.be.empty;
assert.equal(misc.getFolderLevels(item).length, 0);
});
it("should detect path ending with /", () => {
const item = "xxx/";
const res = ["xxx"];
expect(misc.getFolderLevels(item)).to.deep.equal(res);
assert.deepEqual(misc.getFolderLevels(item), res);
});
it("should correctly split folders and files", () => {
const item = "xxx/yyy/zzz.md";
const res = ["xxx", "xxx/yyy"];
expect(misc.getFolderLevels(item)).to.deep.equal(res);
assert.deepEqual(misc.getFolderLevels(item), res);
const item2 = "xxx/yyy/zzz";
const res2 = ["xxx", "xxx/yyy"];
expect(misc.getFolderLevels(item2)).to.deep.equal(res2);
assert.deepEqual(misc.getFolderLevels(item2), res2);
const item3 = "xxx/yyy/zzz/";
const res3 = ["xxx", "xxx/yyy", "xxx/yyy/zzz"];
expect(misc.getFolderLevels(item3)).to.deep.equal(res3);
assert.deepEqual(misc.getFolderLevels(item3), res3);
});
it("should correctly add ending slash if required", () => {
const item = "xxx/yyy/zzz.md";
const res = ["xxx/", "xxx/yyy/"];
expect(misc.getFolderLevels(item, true)).to.deep.equal(res);
assert.deepEqual(misc.getFolderLevels(item, true), res);
const item2 = "xxx/yyy/zzz";
const res2 = ["xxx/", "xxx/yyy/"];
expect(misc.getFolderLevels(item2, true)).to.deep.equal(res2);
assert.deepEqual(misc.getFolderLevels(item2, true), res2);
const item3 = "xxx/yyy/zzz/";
const res3 = ["xxx/", "xxx/yyy/", "xxx/yyy/zzz/"];
expect(misc.getFolderLevels(item3, true)).to.deep.equal(res3);
assert.deepEqual(misc.getFolderLevels(item3, true), res3);
});
it("should treat path starting with / correctly", () => {
const item = "/xxx/yyy/zzz.md";
const res = ["/xxx", "/xxx/yyy"];
expect(misc.getFolderLevels(item)).to.deep.equal(res);
assert.deepEqual(misc.getFolderLevels(item), res);
const item2 = "/xxx/yyy/zzz";
const res2 = ["/xxx", "/xxx/yyy"];
expect(misc.getFolderLevels(item2)).to.deep.equal(res2);
assert.deepEqual(misc.getFolderLevels(item2), res2);
const item3 = "/xxx/yyy/zzz/";
const res3 = ["/xxx", "/xxx/yyy", "/xxx/yyy/zzz"];
expect(misc.getFolderLevels(item3)).to.deep.equal(res3);
assert.deepEqual(misc.getFolderLevels(item3), res3);
const item4 = "/xxx";
const res4 = [] as string[];
expect(misc.getFolderLevels(item4)).to.deep.equal(res4);
assert.deepEqual(misc.getFolderLevels(item4), res4);
const item5 = "/";
const res5 = [] as string[];
expect(misc.getFolderLevels(item5)).to.deep.equal(res5);
assert.deepEqual(misc.getFolderLevels(item5), res5);
});
});
describe("Misc: get parent folder", () => {
it("should treat empty path correctly", () => {
const item = "";
expect(misc.getParentFolder(item)).equals("/");
assert.equal(misc.getParentFolder(item), "/");
});
it("should treat one level path correctly", () => {
let item = "abc/";
expect(misc.getParentFolder(item)).equals("/");
assert.equal(misc.getParentFolder(item), "/");
item = "/efg/";
expect(misc.getParentFolder(item)).equals("/");
assert.equal(misc.getParentFolder(item), "/");
});
it("should treat more levels path correctly", () => {
let item = "abc/efg";
expect(misc.getParentFolder(item)).equals("abc/");
assert.equal(misc.getParentFolder(item), "abc/");
item = "/hij/klm/";
expect(misc.getParentFolder(item)).equals("/hij/");
assert.equal(misc.getParentFolder(item), "/hij/");
});
});
@ -141,18 +141,18 @@ describe("Misc: vaild file name tests", () => {
it("should treat no ascii correctly", async () => {
const x = misc.isVaildText("😄🍎 apple 苹果");
// console.log(x)
expect(x).to.be.true;
assert.ok(x);
});
it("should find not-printable chars correctly", async () => {
const x = misc.isVaildText("😄🍎 apple 苹果\u0000");
// console.log(x)
expect(x).to.be.false;
assert.ok(!x);
});
it("should allow spaces/slashes/...", async () => {
const x = misc.isVaildText("😄🍎 apple 苹果/-_=/\\*%^&@#$`");
expect(x).to.be.true;
assert.ok(x);
});
});
@ -160,26 +160,26 @@ describe("Misc: get dirname", () => {
it("should return itself for folder", async () => {
const x = misc.getPathFolder("ssss/");
// console.log(x)
expect(x).to.equal("ssss/");
assert.equal(x, "ssss/");
});
it("should return folder for file", async () => {
const x = misc.getPathFolder("sss/yyy");
// console.log(x)
expect(x).to.equal("sss/");
assert.equal(x, "sss/");
});
it("should treat / specially", async () => {
const x = misc.getPathFolder("/");
expect(x).to.equal("/");
assert.equal(x, "/");
const y = misc.getPathFolder("/abc");
expect(y).to.equal("/");
assert.equal(y, "/");
});
});
describe("Misc: extract svg", () => {
beforeEach(function () {
beforeEach(() => {
const fakeBrowser = new JSDOM("");
global.window = fakeBrowser.window as any;
});
@ -188,7 +188,7 @@ describe("Misc: extract svg", () => {
const x = "<svg><rect/><g/></svg>";
const y = misc.extractSvgSub(x);
// console.log(x)
expect(y).to.equal("<rect/><g/>");
assert.equal(y, "<rect/><g/>");
});
});
@ -202,7 +202,7 @@ describe("Misc: get split ranges", () => {
end: 10,
},
];
expect(k).to.deep.equal(k2);
assert.deepEqual(k, k2);
});
it("should deal with 0 remainder", () => {
@ -219,7 +219,7 @@ describe("Misc: get split ranges", () => {
end: 20,
},
];
expect(k).to.deep.equal(k2);
assert.deepEqual(k, k2);
});
it("should deal with not-0 remainder", () => {
@ -241,55 +241,132 @@ describe("Misc: get split ranges", () => {
end: 25,
},
];
expect(k).to.deep.equal(k2);
assert.deepEqual(k, k2);
});
});
describe("Misc: at which level", () => {
it("should throw error on some parameters", () => {
expect(() => misc.atWhichLevel(undefined)).to.throw();
expect(() => misc.atWhichLevel("")).to.throw();
expect(() => misc.atWhichLevel("..")).to.throw();
expect(() => misc.atWhichLevel(".")).to.throw();
expect(() => misc.atWhichLevel("/")).to.throw();
expect(() => misc.atWhichLevel("/xxyy")).to.throw();
assert.throws(() => misc.atWhichLevel(undefined));
assert.throws(() => misc.atWhichLevel(""));
assert.throws(() => misc.atWhichLevel(".."));
assert.throws(() => misc.atWhichLevel("."));
assert.throws(() => misc.atWhichLevel("/"));
assert.throws(() => misc.atWhichLevel("/xxyy"));
});
it("should treat folders correctly", () => {
expect(misc.atWhichLevel("x/")).to.be.equal(1);
expect(misc.atWhichLevel("x/y/")).to.be.equal(2);
assert.equal(misc.atWhichLevel("x/"), 1);
assert.equal(misc.atWhichLevel("x/y/"), 2);
});
it("should treat files correctly", () => {
expect(misc.atWhichLevel("x.md")).to.be.equal(1);
expect(misc.atWhichLevel("x/y.md")).to.be.equal(2);
expect(misc.atWhichLevel("x/y/z.md")).to.be.equal(3);
assert.equal(misc.atWhichLevel("x.md"), 1);
assert.equal(misc.atWhichLevel("x/y.md"), 2);
assert.equal(misc.atWhichLevel("x/y/z.md"), 3);
});
});
describe("Misc: special char for dir", () => {
it("should return false for normal string", () => {
expect(misc.checkHasSpecialCharForDir("")).to.be.false;
expect(misc.checkHasSpecialCharForDir("xxx")).to.be.false;
expect(misc.checkHasSpecialCharForDir("yyy_xxx")).to.be.false;
expect(misc.checkHasSpecialCharForDir("yyy.xxx")).to.be.false;
expect(misc.checkHasSpecialCharForDir("yyyxxx")).to.be.false;
assert.ok(!misc.checkHasSpecialCharForDir(""));
assert.ok(!misc.checkHasSpecialCharForDir("xxx"));
assert.ok(!misc.checkHasSpecialCharForDir("yyy_xxx"));
assert.ok(!misc.checkHasSpecialCharForDir("yyy.xxx"));
assert.ok(!misc.checkHasSpecialCharForDir("yyyxxx"));
});
it("should return true for special cases", () => {
expect(misc.checkHasSpecialCharForDir("?")).to.be.true;
expect(misc.checkHasSpecialCharForDir("/")).to.be.true;
expect(misc.checkHasSpecialCharForDir("\\")).to.be.true;
expect(misc.checkHasSpecialCharForDir("xxx/yyy")).to.be.true;
expect(misc.checkHasSpecialCharForDir("xxx\\yyy")).to.be.true;
expect(misc.checkHasSpecialCharForDir("xxx?yyy")).to.be.true;
assert.ok(misc.checkHasSpecialCharForDir("?"));
assert.ok(misc.checkHasSpecialCharForDir("/"));
assert.ok(misc.checkHasSpecialCharForDir("\\"));
assert.ok(misc.checkHasSpecialCharForDir("xxx/yyy"));
assert.ok(misc.checkHasSpecialCharForDir("xxx\\yyy"));
assert.ok(misc.checkHasSpecialCharForDir("xxx?yyy"));
});
});
describe("Misc: split chunk ranges", () => {
it("should fail on negative numner", () => {
assert.throws(() => misc.splitFileSizeToChunkRanges(-1, 2));
assert.throws(() => misc.splitFileSizeToChunkRanges(1, -1));
assert.throws(() => misc.splitFileSizeToChunkRanges(1, 0));
});
it("should return nothing for 0 input", () => {
let input: [number, number] = [0, 1];
let output: any = [];
assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));
input = [0, 100];
output = [];
assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));
});
it("should return single item for 1 input", () => {
let input: [number, number] = [1, 1];
let output = [{ start: 0, end: 0 }];
assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));
input = [1, 100];
output = [{ start: 0, end: 0 }];
assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));
});
it("should return single item for larger or equal input", () => {
let input: [number, number] = [10, 10];
let output = [{ start: 0, end: 9 }];
assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));
input = [10, 21];
output = [{ start: 0, end: 9 }];
assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));
});
it("should return correct items for normal input", () => {
let input: [number, number] = [10, 9];
let output = [
{ start: 0, end: 8 },
{ start: 9, end: 9 },
];
assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));
input = [10, 5];
output = [
{ start: 0, end: 4 },
{ start: 5, end: 9 },
];
assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));
input = [3, 1];
output = [
{ start: 0, end: 0 },
{ start: 1, end: 1 },
{ start: 2, end: 2 },
];
assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));
input = [15, 5];
output = [
{ start: 0, end: 4 },
{ start: 5, end: 9 },
{ start: 10, end: 14 },
];
assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));
input = [1024, 578];
output = [
{ start: 0, end: 577 },
{ start: 578, end: 1023 },
];
assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));
});
});
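These assertions pin down the contract precisely: inclusive zero-based byte ranges, an empty list for an empty file, and a final short chunk when the size is not an exact multiple. A minimal sketch that passes every case above (the shipped implementation may differ):

const splitFileSizeToChunkRanges = (
  totalSize: number,
  chunkSize: number
): { start: number; end: number }[] => {
  if (totalSize < 0 || chunkSize <= 0) {
    throw new Error("totalSize must be >= 0 and chunkSize must be > 0");
  }
  const ranges: { start: number; end: number }[] = [];
  for (let start = 0; start < totalSize; start += chunkSize) {
    // end is inclusive, so the final chunk is clamped to the last byte
    ranges.push({ start, end: Math.min(start + chunkSize, totalSize) - 1 });
  }
  return ranges;
};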
describe("Misc: Dropbox: should fix the folder name cases", () => {
it("should do nothing on empty folders", () => {
const input: any[] = [];
-expect(misc.fixEntityListCasesInplace(input)).to.be.empty;
+assert.equal(misc.fixEntityListCasesInplace(input).length, 0);
});
it("should sort folders by length by side effect", () => {
@@ -306,7 +383,7 @@ describe("Misc: Dropbox: should fix the folder name cases", () => {
{ keyRaw: "bbb/" },
{ keyRaw: "aaaa/" },
];
-expect(misc.fixEntityListCasesInplace(input)).to.deep.equal(output);
+assert.deepEqual(misc.fixEntityListCasesInplace(input), output);
});
it("should fix folder names", () => {
@@ -335,6 +412,6 @@ describe("Misc: Dropbox: should fix the folder name cases", () => {
{ keyRaw: "ddd/eee/fff.md" },
{ keyRaw: "Ggg/Hhh你好/Fff世界.md" },
];
-expect(misc.fixEntityListCasesInplace(input)).to.deep.equal(output);
+assert.deepEqual(misc.fixEntityListCasesInplace(input), output);
});
});
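Dropbox reports paths case-insensitively, which is why parent-folder prefixes need to be rewritten to one canonical casing after sorting entities shortest-first. A heavily simplified sketch of that idea; the body is an assumption, and the shipped function likely handles more edge cases:

const fixEntityListCasesInplace = (entities: { keyRaw: string }[]) => {
  // shorter paths first, so a folder's casing is recorded before its children
  entities.sort((a, b) => a.keyRaw.length - b.keyRaw.length);
  const canonical = new Map<string, string>(); // lowercased folder path -> casing of its last segment
  for (const entity of entities) {
    const isFolder = entity.keyRaw.endsWith("/");
    const segments = entity.keyRaw.split("/").filter((s) => s !== "");
    const fixed: string[] = [];
    for (let i = 0; i < segments.length; i++) {
      if (i === segments.length - 1 && !isFolder) {
        fixed.push(segments[i]); // a file name keeps its own casing
        continue;
      }
      const key = [...fixed, segments[i]].join("/").toLowerCase();
      if (canonical.has(key)) {
        fixed.push(canonical.get(key)!); // reuse the casing recorded earlier
      } else {
        canonical.set(key, segments[i]);
        fixed.push(segments[i]);
      }
    }
    entity.keyRaw = fixed.join("/") + (isFolder ? "/" : "");
  }
  return entities;
};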

View File

@@ -6,6 +6,11 @@ const TerserPlugin = require("terser-webpack-plugin");
const DEFAULT_DROPBOX_APP_KEY = process.env.DROPBOX_APP_KEY || "";
const DEFAULT_ONEDRIVE_CLIENT_ID = process.env.ONEDRIVE_CLIENT_ID || "";
const DEFAULT_ONEDRIVE_AUTHORITY = process.env.ONEDRIVE_AUTHORITY || "";
+const DEFAULT_REMOTELYSAVE_WEBSITE = process.env.REMOTELYSAVE_WEBSITE || "";
+const DEFAULT_REMOTELYSAVE_CLIENT_ID = process.env.REMOTELYSAVE_CLIENT_ID || "";
+const DEFAULT_GOOGLEDRIVE_CLIENT_ID = process.env.GOOGLEDRIVE_CLIENT_ID || "";
+const DEFAULT_GOOGLEDRIVE_CLIENT_SECRET =
+  process.env.GOOGLEDRIVE_CLIENT_SECRET || "";
module.exports = {
entry: "./src/main.ts",
@@ -20,6 +25,10 @@ module.exports = {
"process.env.DEFAULT_DROPBOX_APP_KEY": `"${DEFAULT_DROPBOX_APP_KEY}"`,
"process.env.DEFAULT_ONEDRIVE_CLIENT_ID": `"${DEFAULT_ONEDRIVE_CLIENT_ID}"`,
"process.env.DEFAULT_ONEDRIVE_AUTHORITY": `"${DEFAULT_ONEDRIVE_AUTHORITY}"`,
"process.env.DEFAULT_REMOTELYSAVE_WEBSITE": `"${DEFAULT_REMOTELYSAVE_WEBSITE}"`,
"process.env.DEFAULT_REMOTELYSAVE_CLIENT_ID": `"${DEFAULT_REMOTELYSAVE_CLIENT_ID}"`,
"process.env.DEFAULT_GOOGLEDRIVE_CLIENT_ID": `"${DEFAULT_GOOGLEDRIVE_CLIENT_ID}"`,
"process.env.DEFAULT_GOOGLEDRIVE_CLIENT_SECRET": `"${DEFAULT_GOOGLEDRIVE_CLIENT_SECRET}"`,
}),
// Work around for Buffer is undefined:
// https://github.com/webpack/changelog-v5/issues/10
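DefinePlugin performs a compile-time textual substitution: every occurrence of one of these process.env.DEFAULT_* expressions in the bundled source is replaced by the quoted string literal, so the bundle never reads a real environment variable at runtime. A hypothetical consumer, with the identifier name assumed:

// illustrative only: after the DefinePlugin pass this right-hand side
// is a plain string literal baked into the bundle
export const DEFAULT_GOOGLEDRIVE_CLIENT_ID: string =
  process.env.DEFAULT_GOOGLEDRIVE_CLIENT_ID ?? "";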
@@ -68,6 +77,7 @@ module.exports = {
// crypto: false,
// domain: require.resolve("domain-browser"),
// events: require.resolve("events"),
+fs: false,
http: false,
// http: require.resolve("stream-http"),
https: false,
@@ -87,6 +97,7 @@ module.exports = {
url: require.resolve("url/"),
// util: require.resolve("util"),
// vm: require.resolve("vm-browserify"),
+vm: false,
// zlib: require.resolve("browserify-zlib"),
},
},
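The new fs: false and vm: false entries follow webpack 5's resolve.fallback convention: core Node modules are no longer polyfilled automatically, and false substitutes an empty module instead of failing the build. A dependency that merely feature-detects these modules then degrades gracefully, roughly like this (illustrative code):

const fs = require("fs"); // with fs: false this resolves to an empty module in the browser bundle
if (typeof fs.readFileSync === "function") {
  // running under Node: real filesystem access is available
} else {
  // running in the browser build: fall back to in-memory behaviour
}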