Compare commits

...

68 Commits

Author SHA1 Message Date
fyears 1e4d729eb7 make the wording explict 2024-06-02 23:50:54 +08:00
fyears 64371b7d65 bump to 0.5.3 2024-06-02 23:38:44 +08:00
fyears e116bb1deb google drive is usable now 2024-06-02 23:37:53 +08:00
fyears 2ace90155c fix build and bump to 0.5.2 2024-05-27 00:53:07 +08:00
fyears d885a4c743 remove useless code 2024-05-27 00:36:52 +08:00
fyears b6d46d8b91 remove https detection 2024-05-27 00:35:55 +08:00
fyears 06dad54d4c pro and smart conflict 2024-05-27 00:33:49 +08:00
fyears 0802767726 bump to 0.4.25 2024-05-25 15:58:52 +08:00
fyears be4a2d3271 nextcloud address 2024-05-25 15:57:48 +08:00
fyears 7ca2d19255 more debug info 2024-05-25 15:53:38 +08:00
fyears ff765d5ae7 change back to local time 2024-05-25 15:53:27 +08:00
fyears bdbf0b1484 nextcloud is good 2024-05-25 15:39:29 +08:00
fyears 7497b5fae7 split export 2024-05-25 15:22:19 +08:00
fyears 408acb6230 add custom protection 2024-05-25 14:37:37 +08:00
fyears d9cab7b1ff fix kind 2024-05-24 23:13:40 +08:00
fyears 5e53967e01 bump to 0.4.24 2024-05-24 23:11:37 +08:00
fyears fb9f4a67b4 fix size bug 2024-05-24 23:11:01 +08:00
fyears 26a426dda8 bump to 0.4.23 2024-05-24 22:42:21 +08:00
fyears de64c3c53f fix condition for partial update 2024-05-24 22:41:43 +08:00
fyears 0cefafa491 special treatment for jianguoyun 2024-05-24 22:08:02 +08:00
fyears b769becb97 bump to 0.4.22 2024-05-21 01:04:22 +08:00
fyears 7b3600a46f correct way for nextcloud 2024-05-20 09:56:09 +08:00
fyears 69e72eae1d correctly set range update 2024-05-19 21:41:01 +08:00
fyears b0acde0ba6 remove recursive to fix digest 2024-05-19 20:36:34 +08:00
fyears 11b7fee80b remove unnecessary get 2024-05-19 19:59:16 +08:00
fyears c4c39f6b79 make it more robust 2024-05-19 19:13:56 +08:00
fyears 45578a01dd chunk webdav upload to 5 mb 2024-05-19 18:10:32 +08:00
fyears 0391c42999 wrap settings 2024-05-19 18:09:41 +08:00
fyears 807eec928e optimize sync plan export 2024-05-19 17:51:44 +08:00
fyears 3d7c4d2a4a optimize webdav 2024-05-19 17:03:14 +08:00
fyears cb779fc7bf add upload by chunks for webdav 2024-05-19 15:58:49 +08:00
fyears 9d8e2af7b9 allow skipping empty file in onedrive 2024-05-18 12:03:53 +08:00
Jason a48440e60b
Refine wording and grammar in some descriptions of settings. (#655)
* Refine wording and grammar in some descriptions of settings.

* Remove extra space in settings_encryptionmethod_desc.

* Add required ending comma syntax to settings_webdav_auth_desc.
2024-05-18 02:32:07 +08:00
fyears 2a84fae368 fix typo 2024-05-18 02:31:28 +08:00
fyears 3c3426a842 add more size range 2024-05-18 02:30:50 +08:00
fyears 74de7da89a fix timestamp to date using native js 2024-05-18 01:41:54 +08:00
fyears 1f33ac5d7a remove verbose webdav output 2024-05-18 01:34:46 +08:00
fyears 63c54d1956 check password using walkPartial instead of cache 2024-05-18 01:33:20 +08:00
fyears b584f89a95 profiler itself might impact performance 2024-05-18 00:03:21 +08:00
fyears fa17ea074b format 2024-05-17 22:59:44 +08:00
fyears cb98bae79a buck? localforage 2024-05-17 22:59:34 +08:00
fyears d1e30e3536 bump to 0.4.21 2024-05-09 00:02:07 +08:00
fyears 36079fc1d0 add profiler 2024-05-09 00:01:30 +08:00
fyears 67467a5034 fail statusbar 2024-05-08 22:50:18 +08:00
fyears 2a3df8ab53 fix onedrive issue in enc 2024-05-08 22:37:34 +08:00
fyears e66b0c71c4 fix format again 2024-05-08 22:04:21 +08:00
fyears a081d09212 safe lint from biome 2024-05-08 00:20:15 +08:00
fyears 6ed6122bb6 rm prettier 2024-05-07 23:51:26 +08:00
fyears 235e346d2f slightly format 2024-05-07 23:48:37 +08:00
fyears 084cbc8391 switch formater to biome for speed 2024-05-07 23:48:29 +08:00
fyears dc0c1db779 enable git lfs cache to avoid billing according to https://github.com/actions/checkout/issues/165 2024-05-07 23:22:51 +08:00
fyears 2645ff34e6 bump to 0.4.20 2024-05-07 00:05:41 +08:00
fyears f25a2c2992 shorten text 2024-05-07 00:02:34 +08:00
fyears 3d1269a9f2 add a little helper to see file stat 2024-05-07 00:01:21 +08:00
fyears ed52a8542f fix s3 mtime problem 2024-05-06 23:41:48 +08:00
fyears 757eb5c801 format 2024-04-30 00:57:22 +08:00
fyears 048e7b6251
update readme of s3 (#639) 2024-04-30 00:46:15 +08:00
fyears b762052da3 customize the welcome text 2024-04-30 00:37:52 +08:00
fyears c61efd1367 add steps for cla 2024-04-30 00:13:27 +08:00
fyears 605bffa471
add webdis (#638) 2024-04-30 00:04:00 +08:00
fyears 895e3db4c6 cla bot 2024-04-29 23:51:12 +08:00
fyears ce1990a35f change loglevel for verbose mixedEntityMappings 2024-04-28 00:55:56 +08:00
fyears 9ea7c8e858 bump to 0.4.19 2024-04-28 00:31:14 +08:00
fyears 61a3fab219 webdis 2024-04-27 23:10:36 +08:00
fyears 5340e38eac format of doc 2024-04-27 17:11:59 +08:00
fyears 8d5868b8d8 fix process counting 2024-04-27 17:11:48 +08:00
fyears 55175a6d06 bump to 0.4.18 2024-04-27 12:34:10 +08:00
fyears f60bd25490 add enc for upyun 2024-04-27 12:21:31 +08:00
86 changed files with 6022 additions and 928 deletions

View File

@ -1,3 +1,7 @@
DROPBOX_APP_KEY=
ONEDRIVE_CLIENT_ID=
ONEDRIVE_AUTHORITY=https://
REMOTELYSAVE_WEBSITE=http://127.0.0.1:46683
REMOTELYSAVE_CLIENT_ID=cli-xxx
GOOGLEDRIVE_CLIENT_ID=xxx.apps.googleusercontent.com
GOOGLEDRIVE_CLIENT_SECRET=GOCSPX-sss

View File

@ -19,6 +19,10 @@ jobs:
DROPBOX_APP_KEY: ${{secrets.DROPBOX_APP_KEY}}
ONEDRIVE_CLIENT_ID: ${{secrets.ONEDRIVE_CLIENT_ID}}
ONEDRIVE_AUTHORITY: ${{secrets.ONEDRIVE_AUTHORITY}}
REMOTELYSAVE_WEBSITE: ${{secrets.REMOTELYSAVE_WEBSITE}}
REMOTELYSAVE_CLIENT_ID: ${{secrets.REMOTELYSAVE_CLIENT_ID}}
GOOGLEDRIVE_CLIENT_ID: ${{secrets.GOOGLEDRIVE_CLIENT_ID}}
GOOGLEDRIVE_CLIENT_SECRET: ${{secrets.GOOGLEDRIVE_CLIENT_SECRET}}
strategy:
matrix:
@ -29,10 +33,18 @@ jobs:
- name: Checkout codes
uses: actions/checkout@v2
with:
lfs: true
submodules: recursive
- name: Checkout LFS
run: git lfs checkout
- name: Checkout LFS file list
run: git lfs ls-files --long | cut -d ' ' -f1 | sort > .lfs-assets-id
- name: LFS Cache
uses: actions/cache@v3
with:
path: .git/lfs/objects
key: ${{ runner.os }}-lfs-${{ hashFiles('.lfs-assets-id') }}
restore-keys: |
${{ runner.os }}-lfs-
- name: Git LFS Pull
run: git lfs pull
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v2
with:

43
.github/workflows/cla.yml vendored Normal file
View File

@ -0,0 +1,43 @@
name: "CLA Assistant"
on:
issue_comment:
types: [created]
pull_request_target:
types: [opened,closed,synchronize]
# explicitly configure permissions, in case your GITHUB_TOKEN workflow permissions are set to read-only in repository settings
permissions:
actions: write
contents: write
pull-requests: write
statuses: write
jobs:
CLAAssistant:
runs-on: ubuntu-latest
steps:
- name: "CLA Assistant"
if: (github.event.comment.body == 'recheck' || github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA') || github.event_name == 'pull_request_target'
uses: contributor-assistant/github-action@v2.3.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# the below token should have repo scope and must be manually added by you in the repository's secret
# This token is required only if you have configured to store the signatures in a remote repository/organization
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
with:
path-to-signatures: 'signatures/version1/cla.json'
path-to-document: 'https://github.com/remotely-save/remotely-save/blob/master/CLA.md' # e.g. a CLA or a DCO document
# branch should not be protected
branch: 'main'
allowlist: bot*
# the followings are the optional inputs - If the optional inputs are not given, then default values will be taken
remote-organization-name: remotely-save
remote-repository-name: cla-signed
#create-file-commit-message: 'For example: Creating file for storing CLA Signatures'
#signed-commit-message: 'For example: $contributorName has signed the CLA in $owner/$repo#$pullRequestNo'
custom-notsigned-prcomment: '<br/>Thank you for your submission, we really appreciate it. However, we ask that $you sign our [Contributor License Agreement](https://github.com/remotely-save/remotely-save/blob/master/CLA.md) before we can accept your contribution. You can sign the CLA by just posting a Pull Request Comment same as the below format.<br/>'
#custom-pr-sign-comment: 'The signature to be committed in order to sign the CLA'
#custom-allsigned-prcomment: 'pull request comment when all contributors has signed, defaults to **CLA Assistant Lite bot** All Contributors have signed the CLA.'
#lock-pullrequest-aftermerge: false - if you don't want this bot to automatically lock the pull request after merging (default - true)
#use-dco-flag: true - If you are using DCO instead of CLA

View File

@ -23,6 +23,10 @@ jobs:
DROPBOX_APP_KEY: ${{secrets.DROPBOX_APP_KEY}}
ONEDRIVE_CLIENT_ID: ${{secrets.ONEDRIVE_CLIENT_ID}}
ONEDRIVE_AUTHORITY: ${{secrets.ONEDRIVE_AUTHORITY}}
REMOTELYSAVE_WEBSITE: ${{secrets.REMOTELYSAVE_WEBSITE}}
REMOTELYSAVE_CLIENT_ID: ${{secrets.REMOTELYSAVE_CLIENT_ID}}
GOOGLEDRIVE_CLIENT_ID: ${{secrets.GOOGLEDRIVE_CLIENT_ID}}
GOOGLEDRIVE_CLIENT_SECRET: ${{secrets.GOOGLEDRIVE_CLIENT_SECRET}}
strategy:
matrix:
@ -32,10 +36,18 @@ jobs:
- name: Checkout codes
uses: actions/checkout@v2
with:
lfs: true
submodules: recursive
- name: Checkout LFS
run: git lfs checkout
- name: Checkout LFS file list
run: git lfs ls-files --long | cut -d ' ' -f1 | sort > .lfs-assets-id
- name: LFS Cache
uses: actions/cache@v3
with:
path: .git/lfs/objects
key: ${{ runner.os }}-lfs-${{ hashFiles('.lfs-assets-id') }}
restore-keys: |
${{ runner.os }}-lfs-
- name: Git LFS Pull
run: git lfs pull
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v2
with:

View File

@ -1,3 +0,0 @@
node_modules/
main.js
data.json

23
CLA.md Normal file
View File

@ -0,0 +1,23 @@
In order to clarify the intellectual property license granted with Contributions from any person or entity, Remotely Save dev team ("Remotely Save") must have on file a signed Contributor License Agreement ("CLA") from each Contributor, indicating agreement with the license terms below. This agreement is for your protection as a Contributor as well as the protection of Remotely Save and its users. It does not change your rights to use your own Contributions for any other purpose.
You accept and agree to the following terms and conditions for Your Contributions (present and future) that you submit to Remotely Save. Except for the license granted herein to Remotely Save and recipients of software distributed by Remotely Save, You reserve all right, title, and interest in and to Your Contributions.
1. Definitions.
"You" (or "Your") shall mean the copyright owner or legal entity authorized by the copyright owner that is making this Agreement with Remotely Save. For legal entities, the entity making a Contribution and all other entities that control, are controlled by, or are under common control with that entity are considered to be a single Contributor. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
"Contribution" shall mean any original work of authorship, including any modifications or additions to an existing work, that is intentionally submitted by You to Remotely Save for inclusion in, or documentation of, any of the products owned or managed by Remotely Save (the "Work"). For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to Remotely Save or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, Remotely Save for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by You as "Not a Contribution."
2. Grant of Copyright License. Subject to the terms and conditions of this Agreement, You hereby grant to Remotely Save and to recipients of software distributed by Remotely Save a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, sublicense, and distribute Your Contributions and such derivative works.
3. Grant of Patent License. Subject to the terms and conditions of this Agreement, You hereby grant to Remotely Save and to recipients of software distributed by Remotely Save a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by You that are necessarily infringed by Your Contribution(s) alone or by combination of Your Contribution(s) with the Work to which such Contribution(s) was submitted. If any entity institutes patent litigation against You or any other entity (including a cross-claim or counterclaim in a lawsuit) alleging that your Contribution, or the Work to which you have contributed, constitutes direct or contributory patent infringement, then any patent licenses granted to that entity under this Agreement for that Contribution or Work shall terminate as of the date such litigation is filed.
4. You represent that you are legally entitled to grant the above license. If your employer(s) has rights to intellectual property that you create that includes your Contributions, you represent that you have received permission to make Contributions on behalf of that employer, that your employer has waived such rights for your Contributions to Remotely Save, or that your employer has executed a separate Corporate CLA with Remotely Save.
5. You represent that each of Your Contributions is Your original creation (see section 7 for submissions on behalf of others). You represent that Your Contribution submissions include complete details of any third-party license or other restriction (including, but not limited to, related patents and trademarks) of which you are personally aware and which are associated with any part of Your Contributions.
6. You are not expected to provide support for Your Contributions, except to the extent You desire to provide support. You may provide support for free, for a fee, or not at all. Unless required by applicable law or agreed to in writing, You provide Your Contributions on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE.
7. Should You wish to submit work that is not Your original creation, You may submit it to Remotely Save separately from any Contribution, identifying the complete details of its source and of any license or other restriction (including, but not limited to, related patents, trademarks, and license agreements) of which you are personally aware, and conspicuously marking the work as "Submitted on behalf of a third-party: [named here]".
8. You agree to notify Remotely Save of any facts or circumstances of which you become aware that would make these representations inaccurate in any respect.

22
CONTRIBUTING.md Normal file
View File

@ -0,0 +1,22 @@
# Contributing
## What
Starting from April 29, 2024, all individual contributors' contributions are only possibly accepted after they sign the CLA.
We do not accept corporate contributions at this moment.
You can check out [CLA](./CLA.md).
## Steps
1. Make some changes to the code. Open a pull request.
2. A robot will check the status.
![robot check](./assets/cla-process/cla-robot-alert.png)
3. Read the [CLA](./CLA.md) carefully and make a decision.
4. If you decide to sign the CLA, please make a comment "I have read the CLA Document and I hereby sign the CLA".
5. If you decide to not sign the CLA, please close the PR.
6. The robot should recheck and pass the check automatically.
![robot recheck](./assets/cla-process/cla-sign-and-pass.png)
7. Your PR will be reviewed.
8. If you sign the CLA and submit some more PRs, the robot should pass the test automatically.

203
LICENSE
View File

@ -1,202 +1,3 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
The codes or files or subfolders inside the folder `src`, `tests`, `docs`, `assets`, are released under the "Open Source" license: "Apache License, version 2.0", described at: https://www.apache.org/licenses/LICENSE-2.0 .
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
The codes or files or subfolders inside the folder `pro`, are released under the "Source Available" license: "PolyForm Strict License 1.0.0", described at: https://polyformproject.org/licenses/strict/1.0.0/ .

View File

@ -21,15 +21,17 @@ This is yet another unofficial sync plugin for Obsidian. If you like it or find
- Dropbox
- OneDrive for personal
- Webdav
- Webdis
- Google Drive (PRO feature)
- [Here](./docs/services_connectable_or_not.md) shows more connectable (or not-connectable) services in detail.
- **Obsidian Mobile supported.** Vaults can be synced across mobile and desktop devices with the cloud service as the "broker".
- **[End-to-end encryption](./docs/encryption/README.md) supported.** Files would be encrypted using openssl format before being sent to the cloud **if** the user specifies a password.
- **Scheduled auto sync supported.** You can also manually trigger the sync using sidebar ribbon, or using the command from the command palette (or even bind the hot key combination to the command then press the hot key combination).
- **[Minimal Intrusive](./docs/minimal_intrusive_design.md).**
- **Skip Large files** and **skip paths** by custom regex conditions!
- **Fully open source under [Apache-2.0 License](./LICENSE).**
- **[Sync Algorithm open](./docs/sync_algorithm/v3/intro.md) for discussion.**
- **[Basic Conflict Detection And Handling](./docs/sync_algorithm/v3/intro.md)** now, more to come!
- **[Sync Algorithm](./docs/sync_algorithm/v3/intro.md) is provided for discussion.**
- **[Basic Conflict Detection And Handling](./docs/sync_algorithm/v3/intro.md)** for free version. **[Advanced Conflict Handling](./pro/README.md)** for PRO version.
- Source Available. See [License](./LICENSE) for details.
## Limitations
@ -67,11 +69,12 @@ Additionally, the plugin author may occasionally visit Obsidian official forum a
- [腾讯云 COS](./docs/remote_services/s3_tencent_cloud_cos/README.zh-cn.md) | [Tencent Cloud COS](./docs/remote_services/s3_tencent_cloud_cos/README.md)
- [MinIO](./docs/remote_services/s3_minio/README.md)
- [又拍云](./docs/remote_services/s3_upyun/README.zh-cn.md)
- Prepare your S3 (-compatible) service information: [endpoint, region](https://docs.aws.amazon.com/general/latest/gr/s3.html), [access key id, secret access key](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/getting-your-credentials.html), bucket name. The bucket should be empty and solely for syncing a vault.
- Prepare your S3 (-compatible) service information: [endpoint, region](https://docs.aws.amazon.com/general/latest/gr/s3.html), [access key id, secret access key](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/getting-your-credentials.html), bucket name.
- If you are using AWS S3, create [policy and user](./docs/remote_services/s3_general/s3_user_policy.md).
- Very old version of Obsidian needs [configuring CORS](./docs/remote_services/s3_general/s3_cors_configure.md).
- Download and enable this plugin.
- Enter your information to the settings of this plugin.
- If you do not set the prefix in the settings, the bucket should be empty and solely for syncing a vault. You can set the prefix in the settings so that the same bucket can store multiple vaults.
- If you want to enable end-to-end encryption, also set a password in settings. If you do not specify a password, the files and folders are synced in plain, original content to the cloud.
- Click the new "circle arrow" icon on the ribbon (the left sidebar), **every time** you want to sync your vault between local and remote. (Or, you could configure auto sync in the settings panel (See next chapter).) While syncing, the icon becomes "two half-circle arrows". Besides clicking the icon on the sidebar ribbon, you can also activate the corresponding command in the command palette.
- **Be patient while syncing.** Especially in the first-time sync.
@ -98,6 +101,8 @@ Additionally, the plugin author may occasionally visit Obsidian official forum a
### webdav
- Tutorials / Examples:
- [Nextcloud](./docs/remote_services/webdav_nextcloud/README.md)
- [The Good Cloud](./docs/remote_services/webdav_thegoodcloud/README.md)
- [ownCloud](./docs/remote_services/webdav_owncloud/README.md)
- [InfiniCloud](./docs/remote_services/webdav_infinicloud_teracloud/README.md)
- [Synology webdav server](./docs/remote_services/webdav_synology_webdav_server/README.md) | [群晖 webdav server](./docs/remote_services/webdav_synology_webdav_server/README.zh-cn.md)
@ -108,6 +113,17 @@ Additionally, the plugin author may occasionally visit Obsidian official forum a
- Password-based end-to-end encryption is also supported. But please be aware that **the vault name itself is not encrypted**.
- If you want to sync the files across multiple devices, **your vault name should be the same** while using default settings.
### Webdis
- Tutorials:
- [Webdis](./docs/remote_services/webdis/README.md)
- Mostly experimental.
- You have to set up and protect your web server yourself.
### Google Drive (PRO feature)
PRO (paid) feature "sync with Google Drive" allows users to sync with Google Drive. Tutorials and limitations are documented [here](./docs/remote_services/googledrive/README.md).
## Scheduled Auto Sync
- You can configure auto syncing every N minutes in settings.
@ -120,6 +136,10 @@ Additionally, the plugin author may occasionally visit Obsidian official forum a
In the latest version, you can change the settings to allow syncing `_` files or folders, as well as `.obsidian` special config folder (but not any other `.` files or folders).
## PRO Features
See [PRO](./docs/pro/README.md) for more details.
## How To Debug
See [here](./docs/how_to_debug/README.md) for more details.

BIN
assets/cla-process/cla-robot-alert.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
assets/cla-process/cla-sign-and-pass.png (Stored with Git LFS) Normal file

Binary file not shown.

55
biome.json Normal file
View File

@ -0,0 +1,55 @@
{
"$schema": "https://biomejs.dev/schemas/1.7.3/schema.json",
"organizeImports": {
"enabled": true
},
"files": {
"ignore": ["main.js"]
},
"formatter": {
"enabled": true,
"formatWithErrors": false,
"ignore": [],
"attributePosition": "auto",
"indentStyle": "space",
"indentWidth": 2,
"lineEnding": "lf",
"lineWidth": 80
},
"javascript": {
"formatter": {
"arrowParentheses": "always",
"bracketSameLine": false,
"bracketSpacing": true,
"jsxQuoteStyle": "double",
"quoteProperties": "asNeeded",
"semicolons": "always",
"trailingComma": "es5"
}
},
"linter": {
"enabled": true,
"rules": {
"recommended": true,
"suspicious": {
"noExplicitAny": "off",
"noPrototypeBuiltins": "off",
"noControlCharactersInRegex": "off"
},
"style": {
"noUselessElse": "off",
"useNodejsImportProtocol": "off",
"noUnusedTemplateLiteral": "off",
"useTemplate": "off",
"noNonNullAssertion": "off"
},
"performance": {
"noDelete": "off"
},
"complexity": {
"noForEach": "off",
"useLiteralKeys": "off"
}
}
}
}

View File

@ -1,25 +0,0 @@
// Importing the http module
const http = require("http");
// Debug echo server: pretty-prints any JSON request body to stdout and
// echoes it back to the client.
const requestHandler = (req, res) => {
  let body = [];
  req
    .on("data", (chunk) => {
      body.push(chunk);
    })
    .on("end", () => {
      // The body comes from the network and may not be valid JSON;
      // an uncaught parse error would crash the whole server, so we
      // answer 400 instead.
      try {
        const parsed = JSON.parse(Buffer.concat(body).toString());
        const prettyParsed = JSON.stringify(parsed, null, 2);
        console.log(prettyParsed);
        res.setHeader("Content-Type", "application/json");
        res.end(prettyParsed);
      } catch (e) {
        res.statusCode = 400;
        res.setHeader("Content-Type", "application/json");
        res.end(JSON.stringify({ error: "invalid JSON body" }));
      }
    });
};
const server = http.createServer(requestHandler);
const addr = "0.0.0.0";
const port = 3000;
server.listen(port, addr, undefined, () => {
  console.log(`Server is Running on ${addr}:${port}`);
});

53
docs/pro/README.md Normal file
View File

@ -0,0 +1,53 @@
# PRO Features
From version 0.5.x, Remotely Save introduces PRO (paid) features. Users need to subscribe to (pay) them to use them.
**If you are using basic features only, you don't need an online account, and you don't need to pay for the plugin.**
# Links
* Remotely Save official website: <https://remotelysave.com>
* Sign up / Sign in: <https://remotelysave.com/user/signupin>
* User profile: <https://remotelysave.com/user/profile>
# Disclaimer
It's different from, and NOT affiliated with Obsidian account.
# Steps
## Steps of signing up and signing in
1. Go to the website, sign up and sign in. You can directly visit <https://remotelysave.com/user/signupin> or click the link in Remotely Save plugin setting page.
![pro setting](./pro_setting.png)
2. Use an email and your password as usual. Don't need to be GMail account.
## Steps of connecting
You need to connect your plugin to your online account. In Obsidian, in your Remotely Save plugin setting, you can click the button "Connect" to start the flow.
1. You will see a special address on website. Click it and visit the website
2. Click "allow" on the website.
3. At the end of the auth flow on the website, you will be shown a code; please copy it...
4. And paste the code back to the plugin modal inside Obsidian, and confirm.
![connect flow](./connect_flow.png)
## Steps of subscribing to some features.
1. Firstly please visit your [profile page](https://remotelysave.com/user/profile) online.
2. You can subscribe to some features. Prices vary.
![PRO features online](./pro_features_enabled_on_website.png)
3. Go back to your Remotely Save plugin inside Obsidian, click "Check again" button in PRO settings. So that the plugin knows some features are enabled.
![check again PRO features](./check_pro_features_in_settings.png)
4. Sync and enjoy your PRO features!
## Why so complicated?
Because we don't have a payment method inside the plugin, we have to:
* build a website,
* require users having online accounts
* and connect the plugin to the online account.
Moreover, an online account allows flexible management of subscriptions.

BIN
docs/pro/check_pro_features_in_settings.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
docs/pro/connect_flow.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
docs/pro/pro_features_enabled_on_website.png (Stored with Git LFS) Normal file

Binary file not shown.

BIN
docs/pro/pro_setting.png (Stored with Git LFS) Normal file

Binary file not shown.

View File

@ -0,0 +1,59 @@
# Google Drive (PRO)
# Intro
* It's a PRO feature of Remotely Save plugin.
* **This plugin is NOT an official Google product, and just uses Google Drive's public api.**
# Steps
## Steps of Remotely Save subscription
1. Please sign up, sign in to an online account, and connect your plugin to your online account. See [the PRO tutorial](../../pro/README.md) first.
2. Subscribe to "sync with Google Drive" feature online.
3. Go back to your Remotely Save plugin inside Obsidian, click "Check again" button in PRO settings. So that the plugin knows some features are enabled. In this case, sync with Google Drive should be detected.
## Steps of Connecting to your Google Drive
After you enabled the PRO feature in your Remotely Save plugin, you can connect to your Google Drive account now.
1. In Remotely Save settings, change your sync service to Google Drive.
![change remote to google drive](./change_remote_to_google_drive.png)
2. Click Auth, visit the link, go to Remotely Save website to start.
![visit start link](./google_drive_auth_link.png)
3. On the website, click the link to go to Google Drive auth page.
4. Follow the instruction on Google website, and allow (continue) Remotely Save to connect.
![allow Remotely Save in Google website](./google_drive_auth_allow.png)
5. You will be redirected to Remotely Save website, and you will get a code. Copy it...
![redirected back and get the code](./google_drive_auth_code_show.png)
6. ... And paste the code back to the plugin inside Obsidian. Click submit.
![submit the code in setting](./google_drive_code_submit.png)
7. A notice will tell you that you've connected or not.
8. Sync! The plugin will create a vault folder in the root of your Google Drive and upload notes into that folder.
9. **Read the caveats below.**
# Why so complicated?
Because Google Drive's api doesn't fit into the special environment of an Obsidian plugin, we need a website.
# The credential
The website does **NOT** store or save the Google Drive credential (the code you obtain at the end of the flow). The website is just a "bridge" to help you obtain that code, and just manages your subscription to PRO features.
But please be aware that the code is saved locally in your Obsidian. It works like a special password. So that the plugin can upload or download or modify the files for you.
# The caveats
* As of June 2024, this feature is in beta stage. **Back up your vault before using this feature.**
* The plugin can **only** see, read or write the files and folders created by itself!
It means that, you CANNOT manually create the vault folder in your Google Drive account. And if you manually upload any files using Google's official website, the plugin does **NOT** see them. All operations must go through Obsidian and uploaded by the plugin.
You can, however, view, and download the files on Google Drive [official web page](https://drive.google.com/drive/u/0/my-drive).
Precisely speaking, the plugin applies for the `drive.file` scope recommended by Google. See [the doc](https://developers.google.com/drive/api/guides/api-specific-auth#benefits) by Google for the scope's benefits. Basically the plugin will never (is unable to) mess up your other files or folders.
Moreover, this scope is "not-sensitive", so that the plugin doesn't need to go through Google's complicated verification process while applying for it.
* Google Drive, unlike other cloud storage, allows files of same name co-existing in the same folder! (hmmmmm...) It may or may not make the plugin stop working. Users might need to remove the duplicated file manually on Google's official website.

Binary file not shown.

BIN
docs/remote_services/googledrive/google_drive_auth_allow.png (Stored with Git LFS) Normal file

Binary file not shown.

Binary file not shown.

BIN
docs/remote_services/googledrive/google_drive_auth_link.png (Stored with Git LFS) Normal file

Binary file not shown.

Binary file not shown.

View File

@ -2,18 +2,23 @@
## 链接
* 官网 <https://www.upyun.com/>
* 官网的 S3 文档 <https://help.upyun.com/knowledge-base/aws-s3%e5%85%bc%e5%ae%b9/>
- 官网 <https://www.upyun.com/>
- 官网的 S3 文档 <https://help.upyun.com/knowledge-base/aws-s3%e5%85%bc%e5%ae%b9/>
## 注意!!!!!
又拍云似乎(?)文件都是默认公开的,强烈建议注意隐私问题,强烈建议设置插件加密。
## 步骤
1. 注册,新建对象存储。
2. 参考官网文档 <https://help.upyun.com/knowledge-base/aws-s3%e5%85%bc%e5%ae%b9/>,创建操作员然后创建 S3 访问凭证。
3. 在 Remotely Save 设置以下:
* 服务地址Endpoint`s3.api.upyun.com` **一定是这个域名**
* 区域Region`us-east-1`
* Acccess Key ID您获取到的访问凭证的 AccessKey
* Secret Access Key您获取到的访问凭证的 SecretAccessKey
* 存储桶Bucket的名字您创建的“服务名”
* 是否生成文件夹 Object不生成默认 **一定要选择不生成**
4. 同步。
- 服务地址Endpoint`s3.api.upyun.com` **一定是这个域名**
- 区域Region`us-east-1`
- Acccess Key ID您获取到的访问凭证的 AccessKey
- Secret Access Key您获取到的访问凭证的 SecretAccessKey
- 存储桶Bucket的名字您创建的“服务名”
- 是否生成文件夹 Object不生成默认 **一定要选择不生成**
4. 可以在插件设置里,加上密码。
5. 同步。

View File

@ -0,0 +1,13 @@
# Nextcloud
## Link
<https://nextcloud.com/>
## Steps
1. Install, or find a hosted version.
* The docker version <https://github.com/nextcloud/docker> for internal network, and [Caddy as reverse proxy](https://caddyserver.com/docs/quick-starts/reverse-proxy) (for https), are personally recommended.
* If you find installing Nextcloud by yourself difficult, you can find some "Nextcloud's trusted, certified providers" on the [Nextcloud Sign up page](https://nextcloud.com/sign-up/); for example, [The Good Cloud](https://thegood.cloud/) generously provides 2 GB of free storage space.
* Remotely Save is tested to be working with the docker version and The Good Cloud.
2. Go to Nextcloud's settings. Find the webdav url (something like `https://cloud.example.com/remote.php/dav/files/USERNAME`). Use this (without tailing slash), and your account and your password, in Remotely Save.

View File

@ -0,0 +1,9 @@
# The Good Cloud
## Link
<https://thegood.cloud/>
## Steps
It's a hosted version of Nextcloud providing 2GB free spaces. See [NextCloud](../webdav_nextcloud/README.md) for more instructions.

View File

@ -0,0 +1,35 @@
# Webdis
## Links
- Webdis: <https://github.com/nicolasff/webdis>
- Redis®: <https://redis.io/>
## Explanation and Background
I like the Redis® software very much, and would like to experiment with using it as a "file storage". It seems natural to use the path as the key and the content as the value (sort of..., see below).
However, Redis® works over TCP connections, and browser js cannot establish raw TCP connections. We need an HTTP gateway to provide the HTTP api. Webdis seems to be the most famous open source one.
And of course, this method should work for Redis® alternatives: Valkey, Redict, KeyDB, Dragonfly, Garnet, ...
## Disclaimer
This app is NOT an official Redis® Ltd / Redis® OSS / Webdis product. Redis is a registered trademark of Redis Ltd.
**Never expose your Redis® or Webdis to the public without security protection!!!** You are responsible for protecting your server.
## Usage
1. Install Redis®.
2. Install Webdis.
3. In `webdis.json`, configure the ACL for using password and username, and / or ip filters. **Never expose your Redis® or Webdis to public without security protection!!!**.
4. Install and configure reverse proxy, firewall, https, etc. (You have to configure HTTPS correctly if you want to use it on iOS)
5. In Remotely Save settings, enter your server address, username, password, and adjust the "base dir". Check connection.
6. Sync!
7. Several keys and values will be generated in your Redis® database:
```
rs:fs:v1:${encodeURIComponent(vaultName+'/'+folderStructure+'/'+fileName)}:meta # you can HGETALL it
rs:fs:v1:${encodeURIComponent(vaultName+'/'+folderStructure+'/'+fileName)}:content # you can GET it
```

View File

@ -1,7 +1,7 @@
import dotenv from "dotenv/config";
import "dotenv/config";
import esbuild from "esbuild";
import process from "process";
import inlineWorkerPlugin from "esbuild-plugin-inline-worker";
import process from "process";
// import builtins from 'builtin-modules'
const banner = `/*
@ -17,6 +17,11 @@ const prod = process.argv[2] === "production";
const DEFAULT_DROPBOX_APP_KEY = process.env.DROPBOX_APP_KEY || "";
const DEFAULT_ONEDRIVE_CLIENT_ID = process.env.ONEDRIVE_CLIENT_ID || "";
const DEFAULT_ONEDRIVE_AUTHORITY = process.env.ONEDRIVE_AUTHORITY || "";
const DEFAULT_REMOTELYSAVE_WEBSITE = process.env.REMOTELYSAVE_WEBSITE || "";
const DEFAULT_REMOTELYSAVE_CLIENT_ID = process.env.REMOTELYSAVE_CLIENT_ID || "";
const DEFAULT_GOOGLEDRIVE_CLIENT_ID = process.env.GOOGLEDRIVE_CLIENT_ID || "";
const DEFAULT_GOOGLEDRIVE_CLIENT_SECRET =
process.env.GOOGLEDRIVE_CLIENT_SECRET || "";
esbuild
.context({
@ -52,6 +57,10 @@ esbuild
"process.env.DEFAULT_DROPBOX_APP_KEY": `"${DEFAULT_DROPBOX_APP_KEY}"`,
"process.env.DEFAULT_ONEDRIVE_CLIENT_ID": `"${DEFAULT_ONEDRIVE_CLIENT_ID}"`,
"process.env.DEFAULT_ONEDRIVE_AUTHORITY": `"${DEFAULT_ONEDRIVE_AUTHORITY}"`,
"process.env.DEFAULT_REMOTELYSAVE_WEBSITE": `"${DEFAULT_REMOTELYSAVE_WEBSITE}"`,
"process.env.DEFAULT_REMOTELYSAVE_CLIENT_ID": `"${DEFAULT_REMOTELYSAVE_CLIENT_ID}"`,
"process.env.DEFAULT_GOOGLEDRIVE_CLIENT_ID": `"${DEFAULT_GOOGLEDRIVE_CLIENT_ID}"`,
"process.env.DEFAULT_GOOGLEDRIVE_CLIENT_SECRET": `"${DEFAULT_GOOGLEDRIVE_CLIENT_SECRET}"`,
global: "window",
"process.env.NODE_DEBUG": `undefined`, // ugly fix
"process.env.DEBUG": `undefined`, // ugly fix

View File

@ -1,2 +1,2 @@
export let Buffer = require("buffer").Buffer;
export let process = require("process/browser");
export const Buffer = require("buffer").Buffer;
export const process = require("process/browser");

View File

@ -1,11 +1,11 @@
{
"id": "remotely-save",
"name": "Remotely Save",
"version": "0.4.16",
"version": "0.5.3",
"minAppVersion": "0.13.21",
"description": "Yet another unofficial plugin allowing users to synchronize notes between local device and the cloud service.",
"author": "fyears",
"authorUrl": "https://github.com/fyears",
"isDesktopOnly": false,
"fundingUrl": "https://github.com/remotely-save/donation"
"fundingUrl": "https://remotelysave.com"
}

View File

@ -1,11 +1,11 @@
{
"id": "remotely-save",
"name": "Remotely Save",
"version": "0.4.16",
"version": "0.5.3",
"minAppVersion": "0.13.21",
"description": "Yet another unofficial plugin allowing users to synchronize notes between local device and the cloud service.",
"author": "fyears",
"authorUrl": "https://github.com/fyears",
"isDesktopOnly": false,
"fundingUrl": "https://github.com/remotely-save/donation"
"fundingUrl": "https://remotelysave.com"
}

View File

@ -1,15 +1,15 @@
{
"name": "remotely-save",
"version": "0.4.16",
"version": "0.5.3",
"description": "This is yet another sync plugin for Obsidian app.",
"scripts": {
"dev2": "node esbuild.config.mjs --watch",
"build2": "tsc -noEmit -skipLibCheck && node esbuild.config.mjs production",
"build": "webpack --mode production",
"dev": "webpack --mode development --watch",
"format": "npx prettier --trailing-comma es5 --write .",
"format": "npx @biomejs/biome check --apply .",
"clean": "npx rimraf main.js",
"test": "cross-env TS_NODE_COMPILER_OPTIONS={\\\"module\\\":\\\"commonjs\\\"} mocha -r ts-node/register 'tests/**/*.ts'"
"test": "cross-env TS_NODE_COMPILER_OPTIONS={\\\"module\\\":\\\"commonjs\\\"} mocha -r ts-node/register 'tests/**/*.ts' 'pro/tests/**/*.ts'"
},
"browser": {
"path": "path-browserify",
@ -23,8 +23,9 @@
"source": "main.ts",
"keywords": [],
"author": "",
"license": "Apache-2.0",
"license": "SEE LICENSE IN LICENSE",
"devDependencies": {
"@biomejs/biome": "1.7.3",
"@microsoft/microsoft-graph-types": "^2.40.0",
"@types/chai": "^4.3.14",
"@types/chai-as-promised": "^7.1.8",
@ -44,7 +45,6 @@
"mocha": "^10.4.0",
"npm-check-updates": "^16.14.20",
"obsidian": "^1.5.7",
"prettier": "^3.2.5",
"ts-loader": "^9.5.1",
"ts-node": "^10.9.2",
"tslib": "^2.6.2",
@ -63,6 +63,7 @@
"@fyears/rclone-crypt": "^0.0.7",
"@fyears/tsqueue": "^1.0.1",
"@microsoft/microsoft-graph-client": "^3.0.7",
"@sanity/diff-match-patch": "^3.1.1",
"@smithy/fetch-http-handler": "^2.5.0",
"@smithy/protocol-http": "^3.3.0",
"@smithy/querystring-builder": "^2.2.0",
@ -77,11 +78,13 @@
"http-status-codes": "^2.3.0",
"localforage": "^1.10.0",
"localforage-getitems": "^1.4.2",
"localforage-removeitems": "^1.4.0",
"lodash": "^4.17.21",
"lucide": "^0.376.1",
"mime-types": "^2.1.35",
"mustache": "^4.2.0",
"nanoid": "^5.0.7",
"node-diff3": "^3.1.2",
"p-queue": "^8.0.1",
"path-browserify": "^1.0.1",
"process": "^0.11.10",

104
pro/LICENSE Normal file
View File

@ -0,0 +1,104 @@
# PolyForm Strict License 1.0.0
<https://polyformproject.org/licenses/strict/1.0.0>
## Acceptance
In order to get any license under these terms, you must agree
to them as both strict obligations and conditions to all
your licenses.
## Copyright License
The licensor grants you a copyright license for the software
to do everything you might do with the software that would
otherwise infringe the licensor's copyright in it for any
permitted purpose, other than distributing the software or
making changes or new works based on the software.
## Patent License
The licensor grants you a patent license for the software that
covers patent claims the licensor can license, or becomes able
to license, that you would infringe by using the software.
## Noncommercial Purposes
Any noncommercial purpose is a permitted purpose.
## Personal Uses
Personal use for research, experiment, and testing for
the benefit of public knowledge, personal study, private
entertainment, hobby projects, amateur pursuits, or religious
observance, without any anticipated commercial application,
is use for a permitted purpose.
## Noncommercial Organizations
Use by any charitable organization, educational institution,
public research organization, public safety or health
organization, environmental protection organization,
or government institution is use for a permitted purpose
regardless of the source of funding or obligations resulting
from the funding.
## Fair Use
You may have "fair use" rights for the software under the
law. These terms do not limit them.
## No Other Rights
These terms do not allow you to sublicense or transfer any of
your licenses to anyone else, or prevent the licensor from
granting licenses to anyone else. These terms do not imply
any other licenses.
## Patent Defense
If you make any written claim that the software infringes or
contributes to infringement of any patent, your patent license
for the software granted under these terms ends immediately. If
your company makes such a claim, your patent license ends
immediately for work on behalf of your company.
## Violations
The first time you are notified in writing that you have
violated any of these terms, or done anything with the software
not covered by your licenses, your licenses can nonetheless
continue if you come into full compliance with these terms,
and take practical steps to correct past violations, within
32 days of receiving notice. Otherwise, all your licenses
end immediately.
## No Liability
***As far as the law allows, the software comes as is, without
any warranty or condition, and the licensor will not be liable
to you for any damages arising out of these terms or the use
or nature of the software, under any kind of legal claim.***
## Definitions
The **licensor** is the individual or entity offering these
terms, and the **software** is the software the licensor makes
available under these terms.
**You** refers to the individual or entity agreeing to these
terms.
**Your company** is any legal entity, sole proprietorship,
or other kind of organization that you work for, plus all
organizations that have control over, are under the control of,
or are under common control with that organization. **Control**
means ownership of substantially all the assets of an entity,
or the power to direct its management and policies by vote,
contract, or otherwise. Control can be direct or indirect.
**Your licenses** are all the licenses granted to you for the
software under these terms.
**Use** means anything you do with the software requiring one
of your licenses.

25
pro/README.md Normal file
View File

@ -0,0 +1,25 @@
# Pro Features
## What?
Remotely Save has some "pro features", which users have to pay for in order to use them.
## Sign Up / Sign In And Connect
See the tutorial about your PRO account [here](../docs/pro/README.md).
## Smart Conflict
Basic (free) version can detect conflicts, but users have to choose to keep newer version or larger version of the files.
PRO (paid) feature "Smart Conflict" gives users one more option: merge small markdown files, or duplicate large markdown files or non-markdown files.
## Sync With Google Drive
PRO (paid) feature "sync with Google Drive" allows users to sync with Google Drive. Tutorials and limitations are documented [here](../docs/remote_services/googledrive/README.md).
## License
The codes or files or subfolders inside the current folder (`pro` in the repo), are released under "source available" license: "PolyForm Strict License 1.0.0".
Suggestions are welcome.

328
pro/src/account.ts Normal file
View File

@ -0,0 +1,328 @@
import { nanoid } from "nanoid";
import { base64url } from "rfc4648";
import {
OAUTH2_FORCE_EXPIRE_MILLISECONDS,
type RemotelySavePluginSettings,
} from "../../src/baseTypes";
import {
COMMAND_CALLBACK_PRO,
type FeatureInfo,
PRO_CLIENT_ID,
type PRO_FEATURE_TYPE,
PRO_WEBSITE,
type ProConfig,
} from "./baseTypesPro";
const site = PRO_WEBSITE;
console.debug(`remotelysave official website: ${site}`);
// Fresh, disconnected PRO config: no account linked, no features enabled.
export const DEFAULT_PRO_CONFIG: ProConfig = {
  accessToken: "",
  accessTokenExpiresInMs: 0,
  accessTokenExpiresAtTimeMs: 0,
  refreshToken: "",
  enabledProFeatures: [],
  email: "",
};
/**
 * PKCE helper (RFC 7636): BASE64URL-ENCODE(SHA256(ASCII(code_verifier))).
 * dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk
 * => E9Melhoa2OwvFrEMTJguCHaoeK1t8URWbuGJSstw-cM
 * @param x the code verifier; empty or undefined yields ""
 * @returns the code challenge, or "" when hashing fails
 */
async function codeVerifier2CodeChallenge(x: string) {
  if (x === undefined || x === "") {
    return "";
  }
  try {
    const ascii = new TextEncoder().encode(x);
    const digest = await crypto.subtle.digest("SHA-256", ascii);
    return base64url.stringify(new Uint8Array(digest), { pad: false });
  } catch (e) {
    // degrade to an empty challenge on any crypto failure
    return "";
  }
}
/**
 * Build the OAuth2 (PKCE) authorization url together with the freshly
 * generated verifier / challenge pair.
 * @param hasCallback whether to append the obsidian:// redirect uri
 */
export const generateAuthUrlAndCodeVerifierChallenge = async (
  hasCallback: boolean
) => {
  const appKey = PRO_CLIENT_ID ?? "cli-"; // hard-code
  const codeVerifier = nanoid(128);
  const codeChallenge = await codeVerifier2CodeChallenge(codeVerifier);
  const parts = [
    `${site}/oauth2/authorize?response_type=code`,
    `&client_id=${appKey}`,
    `&token_access_type=offline`,
    `&code_challenge_method=S256`,
    `&code_challenge=${codeChallenge}`,
    `&scope=pro.list.read`,
  ];
  if (hasCallback) {
    parts.push(`&redirect_uri=obsidian://${COMMAND_CALLBACK_PRO}`);
  }
  return {
    authUrl: parts.join(""),
    codeVerifier,
    codeChallenge,
  };
};
/**
 * Exchange the auth code (plus its PKCE verifier) for tokens at the website.
 * On failure, the optional errorCallBack is awaited and undefined is returned.
 */
export const sendAuthReq = async (
  verifier: string,
  authCode: string,
  errorCallBack: any
) => {
  const appKey = PRO_CLIENT_ID ?? "cli-"; // hard-code
  try {
    const payload = new URLSearchParams({
      code: authCode,
      grant_type: "authorization_code",
      code_verifier: verifier,
      client_id: appKey,
      // redirect_uri: `obsidian://${COMMAND_CALLBACK_PRO}`,
      scope: "pro.list.read",
    });
    // console.debug(payload);
    const rawResp = await fetch(`${site}/api/v1/oauth2/token`, {
      method: "POST",
      body: payload,
    });
    return await rawResp.json();
  } catch (e) {
    console.error(e);
    if (errorCallBack !== undefined) {
      await errorCallBack(e);
    }
  }
};
/**
 * Use the refresh token to obtain a fresh access token from the website.
 * Logs and rethrows any network / parsing failure.
 */
export const sendRefreshTokenReq = async (refreshToken: string) => {
  const appKey = PRO_CLIENT_ID ?? "cli-"; // hard-code
  try {
    console.info("start auto getting refreshed Remotely Save access token.");
    const form = new URLSearchParams({
      grant_type: "refresh_token",
      refresh_token: refreshToken,
      client_id: appKey,
      scope: "pro.list.read",
    });
    const rawResp = await fetch(`${site}/api/v1/oauth2/token`, {
      method: "POST",
      body: form,
    });
    const parsed: AuthResError | AuthResSucc = await rawResp.json();
    console.info("finish auto getting refreshed Remotely Save access token.");
    return parsed;
  } catch (e) {
    console.error(e);
    throw e;
  }
};
// Error shape returned by the website's oauth2 token endpoint.
interface AuthResError {
  error: "invalid_request";
}
// Success shape returned by the website's oauth2 token endpoint.
interface AuthResSucc {
  error: undefined; // needed for typescript
  refresh_token?: string; // absent when only the access token is rotated
  access_token: string;
  expires_in: number; // lifetime in seconds
}
/**
 * Persist a successful token response into the pro config, in place,
 * then invoke the save callback. Throws when the response is an error.
 */
export const setConfigBySuccessfullAuthInplace = async (
  config: ProConfig,
  authRes: AuthResError | AuthResSucc,
  saveUpdatedConfigFunc: () => Promise<any> | undefined
) => {
  if (authRes.error !== undefined) {
    throw Error(`you should not save the setting for ${authRes.error}`);
  }
  const lifetimeMs = authRes.expires_in * 1000;
  config.accessToken = authRes.access_token;
  // treat the token as expired five minutes ahead of the server-side time
  config.accessTokenExpiresAtTimeMs = Date.now() + lifetimeMs - 5 * 60 * 1000;
  config.accessTokenExpiresInMs = lifetimeMs;
  config.refreshToken = authRes.refresh_token || config.refreshToken;
  // manually set it expired after 80 days;
  config.credentialsShouldBeDeletedAtTimeMs =
    Date.now() + OAUTH2_FORCE_EXPIRE_MILLISECONDS;
  await saveUpdatedConfigFunc?.();
  console.info(
    "finish updating local info of Remotely Save official website token"
  );
};
/**
 * Return a valid access token, refreshing via the website when the cached
 * one is missing, expired, or past its forced-deletion deadline.
 * Throws when the refresh request does not yield a token.
 */
export const getAccessToken = async (
  config: ProConfig,
  saveUpdatedConfigFunc: () => Promise<any> | undefined
) => {
  const ts = Date.now();
  const hasToken =
    config.accessToken !== undefined && config.accessToken !== "";
  const notExpired = config.accessTokenExpiresAtTimeMs > ts;
  const notForceExpired =
    (config.credentialsShouldBeDeletedAtTimeMs ?? ts + 1000 * 1000) > ts;
  if (hasToken && notExpired && notForceExpired) {
    return config.accessToken;
  }
  console.debug(
    `currently, accessToken=${config.accessToken}, accessTokenExpiresAtTimeMs=${
      config.accessTokenExpiresAtTimeMs
    }, credentialsShouldBeDeletedAtTimeMs=${
      config.credentialsShouldBeDeletedAtTimeMs
    },comp1=${notExpired}, comp2=${notForceExpired}`
  );
  // try to get it again??
  const res = await sendRefreshTokenReq(config.refreshToken ?? "refresh-");
  await setConfigBySuccessfullAuthInplace(config, res, saveUpdatedConfigFunc);
  if (res.error !== undefined) {
    throw Error("cannot update accessToken");
  }
  return res.access_token;
};
/**
 * Fetch the enabled PRO features from the website, store them into the
 * config in place, persist via the callback, and return the response.
 */
export const getAndSaveProFeatures = async (
  config: ProConfig,
  pluginVersion: string,
  saveUpdatedConfigFunc: () => Promise<any> | undefined
) => {
  const token = await getAccessToken(config, saveUpdatedConfigFunc);
  const rawResp = await fetch(`${site}/api/v1/pro/list`, {
    method: "GET",
    headers: {
      Authorization: `Bearer ${token}`,
      "REMOTELYSAVE-API-Plugin-Ver": pluginVersion,
    },
  });
  const parsed: { proFeatures: FeatureInfo[] } = await rawResp.json();
  config.enabledProFeatures = parsed.proFeatures;
  await saveUpdatedConfigFunc?.();
  return parsed;
};
/**
 * Fetch the account email from the website, store it into the config
 * in place, persist via the callback, and return the response.
 */
export const getAndSaveProEmail = async (
  config: ProConfig,
  pluginVersion: string,
  saveUpdatedConfigFunc: () => Promise<any> | undefined
) => {
  const token = await getAccessToken(config, saveUpdatedConfigFunc);
  const rawResp = await fetch(`${site}/api/v1/profile/list`, {
    method: "GET",
    headers: {
      Authorization: `Bearer ${token}`,
      "REMOTELYSAVE-API-Plugin-Ver": pluginVersion,
    },
  });
  const parsed: { email: string } = await rawResp.json();
  config.email = parsed.email;
  await saveUpdatedConfigFunc?.();
  return parsed;
};
/**
 * Verify that every PRO feature the caller is about to use is actually
 * subscribed, refreshing the cached feature list from the website when it
 * looks stale. If the check doesn't pass, the function throws the error.
 * @param featuresToCheck features the caller intends to rely on
 * @param config plugin settings; config.pro may be refreshed in place
 * @param pluginVersion forwarded to the website API for diagnostics
 * @param saveUpdatedConfigFunc persists the config after a refresh
 * @returns true when every requested feature is usable
 */
export const checkProRunnableAndFixInplace = async (
  featuresToCheck: PRO_FEATURE_TYPE[],
  config: RemotelySavePluginSettings,
  pluginVersion: string,
  saveUpdatedConfigFunc: () => Promise<any> | undefined
): Promise<true> => {
  console.debug(`checkProRunnableAndFixInplace`);
  // many checks if status is valid
  // no account
  if (config.pro === undefined || config.pro.refreshToken === undefined) {
    throw Error(`you need to "connect" to your account to use PRO features`);
  }
  const pro = config.pro; // narrowed reference, safe inside closures below
  // every feature should have at most a 40-day expiration date;
  // if any record looks bogus (too far away) or already expired, re-fetch
  const msIn40Days = 1000 * 60 * 60 * 24 * 40;
  for (const f of pro.enabledProFeatures) {
    const tooFarInTheFuture = f.expireAtTimeMs >= Date.now() + msIn40Days;
    const alreadyExpired = f.expireAtTimeMs <= Date.now();
    if (tooFarInTheFuture || alreadyExpired) {
      console.info(
        `the pro feature is too far in the future or has expired, check again.`
      );
      await getAndSaveProFeatures(pro, pluginVersion, saveUpdatedConfigFunc);
      break;
    }
  }
  // exactly one record of the feature in the (possibly refreshed) list
  const isSubscribed = (feature: PRO_FEATURE_TYPE) =>
    pro.enabledProFeatures.filter((x) => x.featureName === feature).length ===
    1;
  const errorMsgs: string[] = [];
  // check for the features
  if (
    featuresToCheck.includes("feature-smart_conflict") &&
    config.conflictAction === "smart_conflict" &&
    !isSubscribed("feature-smart_conflict")
  ) {
    errorMsgs.push(
      `You're trying to use "smart conflict" PRO feature but you haven't subscribed to it.`
    );
  }
  if (featuresToCheck.includes("feature-google_drive")) {
    console.debug(
      `checking "feature-google_drive", serviceType=${config.serviceType}`
    );
    console.debug(
      `enabledProFeatures=${JSON.stringify(pro.enabledProFeatures)}`
    );
    if (
      config.serviceType === "googledrive" &&
      !isSubscribed("feature-google_drive")
    ) {
      errorMsgs.push(
        `You're trying to use "sync with Google Drive" PRO feature but you haven't subscribed to it.`
      );
    }
  }
  if (errorMsgs.length !== 0) {
    throw Error(errorMsgs.join("\n\n"));
  }
  return true;
};

40
pro/src/baseTypesPro.ts Normal file
View File

@ -0,0 +1,40 @@
// Upper bound (bytes) for files eligible for smart-conflict merging.
export const MERGABLE_SIZE = 1000 * 1000; // 1 MB
// obsidian:// protocol action used as the OAuth callback for PRO login.
export const COMMAND_CALLBACK_PRO = "remotely-save-cb-pro";
// OAuth client id and website address, injected at build time via esbuild.
export const PRO_CLIENT_ID = process.env.DEFAULT_REMOTELYSAVE_CLIENT_ID;
export const PRO_WEBSITE = process.env.DEFAULT_REMOTELYSAVE_WEBSITE;
// Every purchasable PRO feature known to this plugin version.
export type PRO_FEATURE_TYPE =
  | "feature-smart_conflict"
  | "feature-google_drive";
// One subscribed feature as reported by the website API.
export interface FeatureInfo {
  featureName: PRO_FEATURE_TYPE;
  // NOTE(review): declared bigint but compared against Date.now() (number)
  // in account.ts — TODO confirm the actual wire type.
  enableAtTimeMs: bigint;
  expireAtTimeMs: bigint;
}
// Locally persisted state of the PRO account connection.
export interface ProConfig {
  email?: string;
  refreshToken?: string;
  accessToken: string;
  accessTokenExpiresInMs: number;
  accessTokenExpiresAtTimeMs: number;
  enabledProFeatures: FeatureInfo[];
  // hard deadline after which the credentials are treated as expired
  credentialsShouldBeDeletedAtTimeMs?: number;
}
// Locally persisted state of the Google Drive connection (PRO feature).
export interface GoogleDriveConfig {
  accessToken: string;
  accessTokenExpiresInMs: number;
  accessTokenExpiresAtTimeMs: number;
  refreshToken: string;
  remoteBaseDir?: string;
  credentialsShouldBeDeletedAtTimeMs?: number;
  // only drive.file scope is requested: the plugin sees only files it created
  scope: "https://www.googleapis.com/auth/drive.file";
}
// Google OAuth client credentials, injected at build time via esbuild.
export const DEFAULT_GOOGLEDRIVE_CLIENT_ID =
  process.env.DEFAULT_GOOGLEDRIVE_CLIENT_ID;
export const DEFAULT_GOOGLEDRIVE_CLIENT_SECRET =
  process.env.DEFAULT_GOOGLEDRIVE_CLIENT_SECRET;

257
pro/src/conflictLogic.ts Normal file
View File

@ -0,0 +1,257 @@
import isEqual from "lodash/isEqual";
// import {
// makePatches,
// applyPatches,
// stringifyPatches,
// parsePatch,
// } from "@sanity/diff-match-patch";
import {
LCS,
diff3Merge,
diffComm,
diffPatch,
mergeDiff3,
mergeDigIn,
patch,
} from "node-diff3";
import type { Entity } from "../../src/baseTypes";
import { copyFile } from "../../src/copyLogic";
import type { FakeFs } from "../../src/fsAll";
import { MERGABLE_SIZE } from "./baseTypesPro";
/**
 * A file qualifies for merging when it is a markdown file (not a folder)
 * no larger than MERGABLE_SIZE; when b is given, both must share one key.
 */
export function isMergable(a: Entity, b?: Entity) {
  if (b !== undefined && a.keyRaw !== b.keyRaw) {
    return false;
  }
  const isFolder = a.keyRaw.endsWith("/");
  const smallEnough = a.sizeRaw <= MERGABLE_SIZE;
  const isMarkdown =
    a.keyRaw.endsWith(".md") || a.keyRaw.endsWith(".markdown");
  return !isFolder && smallEnough && isMarkdown;
}
/**
 * Run node-diff3's mergeDigIn, then wrap any conflict-marker lines
 * (<<<<<<<, =======, >>>>>>>) in backticks so they render literally
 * in a markdown context instead of being swallowed by the renderer.
 * @param a ours
 * @param o original (base)
 * @param b theirs
 */
function mergeDigInModified(a: string, o: string, b: string) {
  const { conflict, result } = mergeDigIn(a, o, b);
  // use the standard Array.prototype.includes instead of Obsidian's
  // non-standard `contains` extension so this module works outside Obsidian
  const markers = ["<<<<<<<", "=======", ">>>>>>>"];
  for (let index = 0; index < result.length; ++index) {
    if (markers.includes(result[index])) {
      result[index] = `\`${result[index]}\``;
    }
  }
  return {
    conflict,
    result,
  };
}
/**
 * Line-wise longest common subsequence of two texts, joined back into one
 * string. Used as a pseudo "original" when no real common ancestor exists.
 */
function getLCSText(a: string, b: string) {
  const aa = a.split("\n");
  const bb = b.split("\n");
  let raw = LCS(aa, bb);
  const k: string[] = [];
  // Walk node-diff3's candidate chain backwards, collecting the common lines
  // front-to-back via unshift.
  // NOTE(review): the first unshift happens BEFORE the buffer1index !== -1
  // check, which assumes the head candidate always points at a real line —
  // confirm against node-diff3's LCS implementation.
  do {
    k.unshift(aa[raw.buffer1index]);
    raw = raw.chain as any;
  } while (raw !== null && raw !== undefined && raw.buffer1index !== -1);
  return k.join("\n");
}
/**
 * Merge two texts that have no known common ancestor.
 * It's tricky: we compute the LCS of both sides, pretend it is the original
 * text, and then run the usual dig-in merge against it.
 * @param a one side's text
 * @param b the other side's text
 * @returns the merged text
 */
function twoWayMerge(a: string, b: string): string {
  const pseudoOrig = getLCSText(a, b);
  const merged = mergeDigInModified(a, pseudoOrig, b);
  return merged.result.join("\n");
}
/**
 * Plain three-way merge of two derived texts against their common ancestor.
 * @param a one side's text
 * @param b the other side's text
 * @param orig the common ancestor text
 * @returns the merged text (with conflict markers escaped for markdown)
 */
function threeWayMerge(a: string, b: string, orig: string) {
  const { result } = mergeDigInModified(a, orig, b);
  return result.join("\n");
}
/**
 * Merge the two versions of `key` on `left` (local) and `right` (remote),
 * write the merged content back to BOTH sides, and return the remote entity
 * together with the merged content.
 * Only markdown files are supported; folders are rejected.
 * @param key vault-relative path of the file
 * @param left the local fs
 * @param right the remote fs
 * @param contentOrig common-ancestor content, or null/undefined if unknown
 *   (then a two-way LCS-based merge is used instead of a three-way merge)
 */
export async function mergeFile(
  key: string,
  left: FakeFs,
  right: FakeFs,
  contentOrig: ArrayBuffer | null | undefined
) {
  // console.debug(
  //   `mergeFile: key=${key}, left=${left.kind}, right=${right.kind}`
  // );
  if (key.endsWith("/")) {
    throw Error(`should not call ${key} in mergeFile`);
  }
  if (!key.endsWith(".md") && !key.endsWith(".markdown")) {
    throw Error(`currently only support markdown files in mergeFile`);
  }

  // read both sides in parallel; they are independent
  const [contentLeft, contentRight] = await Promise.all([
    left.readFile(key),
    right.readFile(key),
  ]);

  let newArrayBuffer: ArrayBuffer | undefined = undefined;
  const decoder = new TextDecoder("utf-8");

  if (isEqual(contentLeft, contentRight)) {
    // we are lucky enough: both sides are byte-identical, no merge needed
    newArrayBuffer = contentLeft;
    // TODO: save the write
  } else {
    if (contentOrig === null || contentOrig === undefined) {
      // no common ancestor known: fall back to the LCS-based two-way merge
      const newText = twoWayMerge(
        decoder.decode(contentLeft),
        decoder.decode(contentRight)
      );
      // no need to worry about the offset here because the array is new and not sliced
      newArrayBuffer = new TextEncoder().encode(newText).buffer;
    } else {
      const newText = threeWayMerge(
        decoder.decode(contentLeft),
        decoder.decode(contentRight),
        decoder.decode(contentOrig)
      );
      newArrayBuffer = new TextEncoder().encode(newText).buffer;
    }
  }

  const mtime = Date.now();

  // left (local) must wait for the right
  // because the mtime might be different after upload
  // upload firstly
  const rightEntity = await right.writeFile(key, newArrayBuffer, mtime, mtime);

  // write local secondly, reusing the remote's client mtime so both sides agree
  const leftEntity = await left.writeFile(
    key,
    newArrayBuffer,
    rightEntity.mtimeCli ?? mtime,
    rightEntity.mtimeCli ?? mtime
  );

  return {
    entity: rightEntity,
    content: newArrayBuffer,
  };
}
/**
 * Produce a ".dup" variant of a file path, inserting the marker before the
 * final extension when one exists:
 *   "kkk"      -> "kkk.dup"        (no dot)
 *   ".kkkk"    -> ".kkkk.dup"      (leading dot only)
 *   "kkkk."    -> "kkkk.dup"       (trailing dot)
 *   "aaa.bbb"  -> "aaa.dup.bbb"    (normal)
 *   "a.b.c"    -> "a.b.dup.c"      (multiple dots)
 * Throws for empty, ".", "..", "/" or folder-like keys.
 * @param key the original vault-relative path
 * @returns the renamed path, in the same folder
 */
export function getFileRename(key: string) {
  const invalid =
    key === "" ||
    key === "." ||
    key === ".." ||
    key === "/" ||
    key.endsWith("/");
  if (invalid) {
    throw Error(`we cannot rename key=${key}`);
  }

  const parts = key.split("/");
  const baseName = parts[parts.length - 1];
  const nameSegs = baseName.split(".");

  let renamed: string;
  if (nameSegs.length === 0) {
    throw Error(`we cannot rename key=${key}`);
  } else if (nameSegs.length === 1) {
    // name = "kkk" without any dot
    renamed = `${baseName}.dup`;
  } else if (nameSegs.length === 2) {
    const [head, tail] = nameSegs;
    if (head === "") {
      // name = ".kkkk" with leading dot
      renamed = `${baseName}.dup`;
    } else if (tail === "") {
      // name = "kkkk." with tailing dot
      renamed = `${head}.dup`;
    } else {
      // name = "aaa.bbb" normally
      renamed = `${head}.dup.${tail}`;
    }
  } else {
    // name = "[...].bbb.ccc"
    const stem = nameSegs.slice(0, nameSegs.length - 1).join(".");
    const ext = nameSegs[nameSegs.length - 1];
    renamed = `${stem}.dup.${ext}`;
  }

  parts[parts.length - 1] = renamed;
  return parts.join("/");
}
/**
 * Keep both conflicting copies instead of merging:
 * local:  x.md -> renamed to x.dup.md -> upload to remote
 * remote: x.md -> download to local -> using original name x.md
 * @param key the conflicting file
 * @param left the local fs
 * @param right the remote fs
 * @param uploadCallback invoked with the entity of the uploaded duplicate
 * @param downloadCallback invoked with the entity of the downloaded original
 * @returns entities of the uploaded and downloaded files
 */
export async function duplicateFile(
  key: string,
  left: FakeFs,
  right: FakeFs,
  uploadCallback: (entity: Entity) => Promise<any>,
  downloadCallback: (entity: Entity) => Promise<any>
) {
  // probe for an unused "*.dup*" name on the local side
  let key2 = getFileRename(key);
  let usable = false;
  do {
    try {
      const s = await left.stat(key2);
      if (s === null || s === undefined) {
        // fix: message had a stray "$" ("not exist $${key2}")
        throw Error(`not exist ${key2}`);
      }
      console.debug(`key2=${key2} exists, cannot use for new file`);
      key2 = getFileRename(key2);
      console.debug(`key2=${key2} is prepared for next try`);
    } catch (e) {
      // stat throwing (or returning nothing) means the name is free —
      // not exists, exactly what we want
      console.debug(`key2=${key2} doesn't exist, usable for new file`);
      usable = true;
    }
  } while (!usable);

  await left.rename(key, key2);

  /**
   * x.dup.md -> upload to remote
   */
  async function f1() {
    const k = await copyFile(key2, left, right);
    await uploadCallback(k.entity);
    return k.entity;
  }

  /**
   * x.md -> download to local
   */
  async function f2() {
    const k = await copyFile(key, right, left);
    await downloadCallback(k.entity);
    return k.entity;
  }

  // upload and download are independent, run them in parallel
  const [resUpload, resDownload] = await Promise.all([f1(), f2()]);

  return {
    upload: resUpload,
    download: resDownload,
  };
}

765
pro/src/fsGoogleDrive.ts Normal file
View File

@ -0,0 +1,765 @@
// https://developers.google.com/identity/protocols/oauth2/native-app
// https://developers.google.com/identity/protocols/oauth2/javascript-implicit-flow
// https://developers.google.com/identity/protocols/oauth2/web-server
import { entries } from "lodash";
import * as mime from "mime-types";
import { requestUrl } from "obsidian";
import PQueue from "p-queue";
import { DEFAULT_CONTENT_TYPE, type Entity } from "../../src/baseTypes";
import { FakeFs } from "../../src/fsAll";
import {
getFolderLevels,
splitFileSizeToChunkRanges,
unixTimeToStr,
} from "../../src/misc";
import {
DEFAULT_GOOGLEDRIVE_CLIENT_ID,
DEFAULT_GOOGLEDRIVE_CLIENT_SECRET,
type GoogleDriveConfig,
} from "./baseTypesPro";
/** Initial (unauthenticated) Google Drive settings. */
export const DEFAULT_GOOGLEDRIVE_CONFIG: GoogleDriveConfig = {
  accessToken: "",
  refreshToken: "",
  accessTokenExpiresInMs: 0,
  accessTokenExpiresAtTimeMs: 0,
  credentialsShouldBeDeletedAtTimeMs: 0,
  scope: "https://www.googleapis.com/auth/drive.file",
};

// mime type Google Drive uses to mark folders
const FOLDER_MIME_TYPE = "application/vnd.google-apps.folder";
/**
 * A simplified version of the Drive API `File` resource type.
 * All fields are optional because only those requested via the `fields=`
 * query parameter are populated in responses.
 * https://developers.google.com/drive/api/reference/rest/v3/files
 */
interface File {
  kind?: string;
  driveId?: string;
  fileExtension?: string;
  copyRequiresWriterPermission?: boolean;
  md5Checksum?: string;
  writersCanShare?: boolean;
  viewedByMe?: boolean;
  mimeType?: string;
  parents?: string[];
  thumbnailLink?: string;
  iconLink?: string;
  shared?: boolean;
  headRevisionId?: string;
  webViewLink?: string;
  webContentLink?: string;
  // NOTE: the API serializes sizes and versions as strings, not numbers
  size?: string;
  viewersCanCopyContent?: boolean;
  hasThumbnail?: boolean;
  spaces?: string[];
  folderColorRgb?: string;
  id?: string;
  name?: string;
  description?: string;
  starred?: boolean;
  trashed?: boolean;
  explicitlyTrashed?: boolean;
  createdTime?: string;
  modifiedTime?: string;
  modifiedByMeTime?: string;
  viewedByMeTime?: string;
  sharedWithMeTime?: string;
  quotaBytesUsed?: string;
  version?: string;
  originalFilename?: string;
  ownedByMe?: boolean;
  fullFileExtension?: string;
  isAppAuthorized?: boolean;
  teamDriveId?: string;
  hasAugmentedPermissions?: boolean;
  thumbnailVersion?: string;
  trashedTime?: string;
  modifiedByMe?: boolean;
  permissionIds?: string[];
  resourceKey?: string;
  sha1Checksum?: string;
  sha256Checksum?: string;
}
/** An Entity enriched with the Google Drive specific bookkeeping fields. */
interface GDEntity extends Entity {
  // Drive file ID of this file/folder
  id: string;
  // Drive file ID of the containing folder
  parentID: string | undefined;
  // vault-relative path of the containing folder ("" for the base dir)
  parentIDPath: string | undefined;
  isFolder: boolean;
}
/**
 * Exchange a refresh token for a fresh access token.
 * https://developers.google.com/identity/protocols/oauth2/web-server#httprest_7
 * @param refreshToken the long-lived refresh token
 * @returns the parsed token response, e.g.
 * {
 *   "access_token": "1/fFAGRNJru1FTz70BzhT3Zg",
 *   "expires_in": 3920,
 *   "scope": "https://www.googleapis.com/auth/drive.file",
 *   "token_type": "Bearer"
 * }
 */
export const sendRefreshTokenReq = async (refreshToken: string) => {
  console.debug(`refreshing token`);
  const form = new URLSearchParams({
    client_id: DEFAULT_GOOGLEDRIVE_CLIENT_ID ?? "",
    client_secret: DEFAULT_GOOGLEDRIVE_CLIENT_SECRET ?? "",
    grant_type: "refresh_token",
    refresh_token: refreshToken,
  });
  const resp = await fetch("https://oauth2.googleapis.com/token", {
    method: "POST",
    headers: {
      "Content-Type": "application/x-www-form-urlencoded",
    },
    body: form.toString(),
  });
  if (resp.status !== 200) {
    throw Error(`cannot refresh an access token`);
  }
  const tokenInfo = await resp.json();
  console.debug(`new token obtained`);
  return tokenInfo;
};
/**
 * Convert a Drive API `File` resource into our GDEntity.
 * @param file the File resource returned by the Drive API
 * @param parentID Drive file ID of the folder containing `file`; must be a
 *   concrete ID (never "" or "root", since the vault lives inside a folder)
 * @param parentFolderPath vault-relative path of that folder
 *   ("" for the base dir, otherwise must end with "/"); used during bfs
 */
const fromFileToGDEntity = (
  file: File,
  parentID: string,
  parentFolderPath: string | undefined /* for bfs */
) => {
  if (parentID === undefined || parentID === "" || parentID === "root") {
    throw Error(`parentID=${parentID} should not be in fromFileToGDEntity`);
  }
  let keyRaw = file.name!;
  if (
    parentFolderPath !== undefined &&
    parentFolderPath !== "" &&
    parentFolderPath !== "/"
  ) {
    if (!parentFolderPath.endsWith("/")) {
      throw Error(
        `parentFolderPath=${parentFolderPath} should not be in fromFileToGDEntity`
      );
    }
    keyRaw = `${parentFolderPath}${file.name}`;
  }
  const isFolder = file.mimeType === FOLDER_MIME_TYPE;
  if (isFolder) {
    // folder keys always carry a trailing slash
    keyRaw = `${keyRaw}/`;
  }
  return {
    key: keyRaw,
    keyRaw: keyRaw,
    mtimeCli: Date.parse(file.modifiedTime!),
    mtimeSvr: Date.parse(file.modifiedTime!),
    size: isFolder ? 0 : Number.parseInt(file.size!),
    sizeRaw: isFolder ? 0 : Number.parseInt(file.size!),
    hash: isFolder ? undefined : file.md5Checksum!,
    id: file.id!,
    parentID: parentID,
    // fix: GDEntity declares parentIDPath and stat() reads it back to rebuild
    // the key, but it was never assigned here (the `as GDEntity` cast hid the
    // omission), so stat() on nested files computed keys without their folder
    // prefix and corrupted the key cache
    parentIDPath: parentFolderPath,
    isFolder: isFolder,
  } as GDEntity;
};
/**
 * A FakeFs implementation backed by Google Drive, keeping the whole vault
 * inside one base folder on the drive. Because the Drive API addresses files
 * by opaque IDs rather than paths, a key -> GDEntity cache (`keyToGDEntity`)
 * is built while walking and consulted by stat/read/write/rm.
 */
export class FakeFsGoogleDrive extends FakeFs {
  kind: string;
  googleDriveConfig: GoogleDriveConfig;
  remoteBaseDir: string;
  vaultFolderExists: boolean;
  saveUpdatedConfigFunc: () => Promise<any>;
  // cache: vault-relative key (folders end with "/") -> Drive entity
  keyToGDEntity: Record<string, GDEntity>;
  // Drive file ID of the base folder; "" until _init succeeds
  baseDirID: string;

  constructor(
    googleDriveConfig: GoogleDriveConfig,
    vaultName: string,
    saveUpdatedConfigFunc: () => Promise<any>
  ) {
    super();
    this.kind = "googledrive";
    this.googleDriveConfig = googleDriveConfig;
    this.remoteBaseDir = this.googleDriveConfig.remoteBaseDir || vaultName || "";
    this.vaultFolderExists = false;
    this.saveUpdatedConfigFunc = saveUpdatedConfigFunc;
    this.keyToGDEntity = {};
    this.baseDirID = "";
  }

  /**
   * Ensure a valid access token and that the base folder `remoteBaseDir`
   * exists on the drive (creating it if needed), recording its ID.
   */
  async _init() {
    // get accessToken
    await this._getAccessToken();

    // check vault folder exists
    if (this.vaultFolderExists) {
      // pass
    } else {
      const q = encodeURIComponent(
        `name='${this.remoteBaseDir}' and mimeType='application/vnd.google-apps.folder' and trashed=false`
      );
      const url: string = `https://www.googleapis.com/drive/v3/files?q=${q}&pageSize=1000&fields=kind,nextPageToken,files(kind,fileExtension,md5Checksum,mimeType,parents,size,spaces,id,name,trashed,createdTime,modifiedTime,quotaBytesUsed,originalFilename,fullFileExtension,sha1Checksum,sha256Checksum)`;
      const k = await fetch(url, {
        method: "GET",
        headers: {
          Authorization: `Bearer ${await this._getAccessToken()}`,
        },
      });
      const k1: { files: File[] } = await k.json();
      // console.debug(k1);
      if (k1.files.length > 0) {
        // yeah we find it
        this.baseDirID = k1.files[0].id!;
        this.vaultFolderExists = true;
      } else {
        // wait, we need to create the folder!
        console.debug(`we need to create the base dir ${this.remoteBaseDir}`);
        const meta: any = {
          mimeType: FOLDER_MIME_TYPE,
          name: this.remoteBaseDir,
        };
        const res = await fetch("https://www.googleapis.com/drive/v3/files", {
          method: "POST",
          headers: {
            Authorization: `Bearer ${await this._getAccessToken()}`,
            "Content-Type": "application/json",
          },
          body: JSON.stringify(meta),
        });
        // fix: check the status BEFORE parsing the body, so a failed request
        // throws our explicit error instead of a confusing JSON parse error
        if (res.status !== 200) {
          throw Error(
            `cannot create base dir ${this.remoteBaseDir} in init func.`
          );
        }
        const res2: File = await res.json();
        console.debug(`succeed to create the base dir ${this.remoteBaseDir}`);
        this.baseDirID = res2.id!;
        this.vaultFolderExists = true;
      }
    }
  }

  /**
   * Return a valid access token, transparently refreshing (and persisting the
   * new credentials) when the cached one has expired.
   */
  async _getAccessToken() {
    if (
      this.googleDriveConfig.accessToken === "" ||
      this.googleDriveConfig.refreshToken === ""
    ) {
      throw Error("The user has not manually auth yet.");
    }

    const ts = Date.now();
    if (this.googleDriveConfig.accessTokenExpiresAtTimeMs > ts) {
      return this.googleDriveConfig.accessToken;
    }

    // refresh
    const k = await sendRefreshTokenReq(this.googleDriveConfig.refreshToken);
    this.googleDriveConfig.accessToken = k.access_token;
    this.googleDriveConfig.accessTokenExpiresInMs = k.expires_in * 1000;
    // expire 2 minutes early to avoid using an almost-expired token
    this.googleDriveConfig.accessTokenExpiresAtTimeMs =
      ts + k.expires_in * 1000 - 60 * 2 * 1000;
    await this.saveUpdatedConfigFunc();
    console.info("Google Drive accessToken updated");
    return this.googleDriveConfig.accessToken;
  }

  /**
   * List every entity under the base folder, traversing breadth-first with a
   * small concurrency-limited queue, and populate the key cache on the way.
   * https://developers.google.com/drive/api/reference/rest/v3/files/list
   */
  async walk(): Promise<Entity[]> {
    await this._init();

    const allFiles: GDEntity[] = [];

    // bfs
    const queue = new PQueue({
      concurrency: 5, // TODO: make it configurable?
      autoStart: true,
    });
    queue.on("error", (error) => {
      queue.pause();
      queue.clear();
      throw error;
    });

    let parents = [
      {
        id: this.baseDirID, // special init, from already created root folder ID
        folderPath: "",
      },
    ];

    while (parents.length !== 0) {
      const children: typeof parents = [];
      for (const { id, folderPath } of parents) {
        queue.add(async () => {
          const filesUnderFolder = await this._walkFolder(id, folderPath);
          for (const f of filesUnderFolder) {
            allFiles.push(f);
            if (f.isFolder) {
              // keyRaw itself already has a tailing slash, no more slash here
              // keyRaw itself also already has full path
              const child = {
                id: f.id,
                folderPath: f.keyRaw,
              };
              children.push(child);
            }
          }
        });
      }
      // one bfs level at a time: wait for this level before descending
      await queue.onIdle();
      parents = children;
    }

    return allFiles;
  }

  /**
   * List one folder level (following pagination) and cache each entity.
   */
  async _walkFolder(parentID: string, parentFolderPath: string) {
    const filesOneLevel: GDEntity[] = [];
    let nextPageToken: string | undefined = undefined;

    if (parentID === undefined || parentID === "" || parentID === "root") {
      // we should never start from root
      // because we encapsulate the vault inside a folder
      throw Error(`something goes wrong walking folder`);
    }

    do {
      const q = encodeURIComponent(
        `'${parentID}' in parents and trashed=false`
      );
      const pageToken =
        nextPageToken !== undefined ? `&pageToken=${nextPageToken}` : "";
      const url: string = `https://www.googleapis.com/drive/v3/files?q=${q}&pageSize=1000&fields=kind,nextPageToken,files(kind,fileExtension,md5Checksum,mimeType,parents,size,spaces,id,name,trashed,createdTime,modifiedTime,quotaBytesUsed,originalFilename,fullFileExtension,sha1Checksum,sha256Checksum)${pageToken}`;
      const k = await fetch(url, {
        method: "GET",
        headers: {
          Authorization: `Bearer ${await this._getAccessToken()}`,
        },
      });
      if (k.status !== 200) {
        throw Error(`cannot walk for parentID=${parentID}`);
      }
      const k1 = await k.json();
      for (const file of k1.files as File[]) {
        const entity = fromFileToGDEntity(file, parentID, parentFolderPath);
        this.keyToGDEntity[entity.keyRaw] = entity; // build cache
        filesOneLevel.push(entity);
      }
      nextPageToken = k1.nextPageToken;
    } while (nextPageToken !== undefined);

    return filesOneLevel;
  }

  /** Only list the first level under the base folder (for partial sync). */
  async walkPartial(): Promise<Entity[]> {
    await this._init();
    const filesInLevel = await this._walkFolder(this.baseDirID, "");
    return filesInLevel;
  }

  /**
   * Re-fetch the metadata of a single already-cached key.
   * Requires the key to be present in `keyToGDEntity` (i.e. walk ran before).
   * https://developers.google.com/drive/api/reference/rest/v3/files/get
   * https://developers.google.com/drive/api/guides/fields-parameter
   */
  async stat(key: string): Promise<Entity> {
    await this._init();
    // TODO: we already have a cache, should we call again?
    const cachedEntity = this.keyToGDEntity[key];
    const fileID = cachedEntity?.id;
    if (cachedEntity === undefined || fileID === undefined) {
      throw Error(`no fileID found for key=${key}`);
    }
    const url: string = `https://www.googleapis.com/drive/v3/files/${fileID}?fields=kind,fileExtension,md5Checksum,mimeType,parents,size,spaces,id,name,trashed,createdTime,modifiedTime,quotaBytesUsed,originalFilename,fullFileExtension,sha1Checksum,sha256Checksum`;
    const k = await fetch(url, {
      method: "GET",
      headers: {
        Authorization: `Bearer ${await this._getAccessToken()}`,
      },
    });
    if (k.status !== 200) {
      throw Error(`cannot get file meta fileID=${fileID}, key=${key}`);
    }
    const k1: File = await k.json();
    const entity = fromFileToGDEntity(
      k1,
      cachedEntity.parentID!,
      cachedEntity.parentIDPath!
    );
    // insert back to cache to keep it fresh
    this.keyToGDEntity[key] = entity;
    return entity;
  }

  /**
   * Create one folder level. The parent folder must already exist in the
   * cache (callers create levels outermost-first).
   * https://developers.google.com/drive/api/guides/folder
   */
  async mkdir(
    key: string,
    mtime: number | undefined,
    ctime: number | undefined
  ): Promise<Entity> {
    if (!key.endsWith("/")) {
      throw Error(`you should not mkdir on key=${key}`);
    }
    await this._init();

    // xxx/ => ["xxx"]
    // xxx/yyy/zzz/ => ["xxx", "xxx/yyy", "xxx/yyy/zzz"]
    const folderLevels = getFolderLevels(key);
    let parentFolderPath: string | undefined = undefined;
    let parentID: string | undefined = undefined;
    if (folderLevels.length === 0) {
      throw Error(`cannot getFolderLevels of ${key}`);
    } else if (folderLevels.length === 1) {
      parentID = this.baseDirID;
      parentFolderPath = ""; // ignore base dir
    } else {
      // length > 1
      parentFolderPath = `${folderLevels[folderLevels.length - 2]}/`;
      if (!(parentFolderPath in this.keyToGDEntity)) {
        throw Error(
          `parent of ${key}: ${parentFolderPath} is not created before??`
        );
      }
      parentID = this.keyToGDEntity[parentFolderPath].id;
    }

    // xxx/yyy/zzz/ => ["xxx", "xxx/yyy", "xxx/yyy/zzz"] => "xxx/yyy/zzz" => "zzz"
    let folderItselfWithoutSlash = folderLevels[folderLevels.length - 1];
    folderItselfWithoutSlash = folderItselfWithoutSlash.split("/").pop()!;

    const meta: any = {
      mimeType: FOLDER_MIME_TYPE,
      modifiedTime: unixTimeToStr(mtime, true),
      createdTime: unixTimeToStr(ctime, true),
      name: folderItselfWithoutSlash,
      parents: [parentID],
    };
    const res = await fetch("https://www.googleapis.com/drive/v3/files", {
      method: "POST",
      headers: {
        Authorization: `Bearer ${await this._getAccessToken()}`,
        "Content-Type": "application/json",
      },
      body: JSON.stringify(meta),
    });
    if (res.status !== 200 && res.status !== 201) {
      throw Error(`create folder ${key} failed! meta=${JSON.stringify(meta)}`);
    }
    const res2: File = await res.json();
    const entity = fromFileToGDEntity(res2, parentID, parentFolderPath);
    // insert into cache
    this.keyToGDEntity[key] = entity;
    return entity;
  }

  /**
   * Upload a file: multipart upload for <= 5 MB, resumable chunked upload
   * otherwise. The parent folder must already be in the cache.
   * https://developers.google.com/drive/api/guides/manage-uploads
   * https://stackoverflow.com/questions/65181932/how-i-can-upload-file-to-google-drive-with-google-drive-api
   */
  async writeFile(
    key: string,
    content: ArrayBuffer,
    mtime: number,
    ctime: number
  ): Promise<Entity> {
    if (key.endsWith("/")) {
      throw Error(`should not call writeFile on ${key}`);
    }
    await this._init();

    const contentType =
      mime.contentType(mime.lookup(key) || DEFAULT_CONTENT_TYPE) ||
      DEFAULT_CONTENT_TYPE;

    let parentID: string | undefined = undefined;
    let parentFolderPath: string | undefined = undefined;
    // "xxx" => []
    // "xxx/yyy/zzz.md" => ["xxx", "xxx/yyy"]
    const folderLevels = getFolderLevels(key);
    if (folderLevels.length === 0) {
      // root
      parentID = this.baseDirID;
      parentFolderPath = "";
    } else {
      parentFolderPath = `${folderLevels[folderLevels.length - 1]}/`;
      if (!(parentFolderPath in this.keyToGDEntity)) {
        throw Error(
          `parent of ${key}: ${parentFolderPath} is not created before??`
        );
      }
      parentID = this.keyToGDEntity[parentFolderPath].id;
    }

    const fileItself = key.split("/").pop()!;

    if (content.byteLength <= 5 * 1024 * 1024) {
      // small file: single multipart request with metadata + media
      const formData = new FormData();
      const meta: any = {
        name: fileItself,
        modifiedTime: unixTimeToStr(mtime, true),
        createdTime: unixTimeToStr(ctime, true),
        parents: [parentID],
      };
      formData.append(
        "metadata",
        new Blob([JSON.stringify(meta)], {
          type: "application/json; charset=UTF-8",
        })
      );
      formData.append("media", new Blob([content], { type: contentType }));
      const res = await fetch(
        "https://www.googleapis.com/upload/drive/v3/files?uploadType=multipart&fields=kind,fileExtension,md5Checksum,mimeType,parents,size,spaces,id,name,trashed,createdTime,modifiedTime,quotaBytesUsed,originalFilename,fullFileExtension,sha1Checksum,sha256Checksum",
        {
          method: "POST",
          headers: {
            Authorization: `Bearer ${await this._getAccessToken()}`,
          },
          body: formData,
        }
      );
      if (res.status !== 200 && res.status !== 201) {
        throw Error(`create file ${key} failed! meta=${JSON.stringify(meta)}`);
      }
      const res2: File = await res.json();
      console.debug(
        `upload ${key} with ${JSON.stringify(meta)}, res2=${JSON.stringify(
          res2
        )}`
      );
      const entity = fromFileToGDEntity(res2, parentID, parentFolderPath);
      // insert into cache
      this.keyToGDEntity[key] = entity;
      return entity;
    } else {
      // large file: start a resumable upload session, then PUT chunks
      const meta: any = {
        name: fileItself,
        modifiedTime: unixTimeToStr(mtime, true),
        createdTime: unixTimeToStr(ctime, true),
        parents: [parentID],
      };
      const bodyStr = JSON.stringify(meta);
      const headers: HeadersInit = {
        Authorization: `Bearer ${await this._getAccessToken()}`,
        "Content-Type": "application/json",
        "Content-Length": `${bodyStr.length}`,
        "X-Upload-Content-Type": contentType,
        "X-Upload-Content-Length": `${content.byteLength}`,
      };
      const res = await fetch(
        "https://www.googleapis.com/upload/drive/v3/files?uploadType=resumable&fields=kind,fileExtension,md5Checksum,mimeType,parents,size,spaces,id,name,trashed,createdTime,modifiedTime,quotaBytesUsed,originalFilename,fullFileExtension,sha1Checksum,sha256Checksum",
        {
          method: "POST",
          headers: headers,
          body: bodyStr,
        }
      );
      if (res.status !== 200) {
        throw Error(
          `create resumable file ${key} failed! meta=${JSON.stringify(
            meta
          )}, header=${JSON.stringify(headers)}`
        );
      }
      const uploadLocation = res.headers.get("Location");
      if (uploadLocation === null || !uploadLocation.startsWith("http")) {
        throw Error(
          `create resumable file ${key} failed! meta=${JSON.stringify(
            meta
          )}, header=${JSON.stringify(headers)}`
        );
      }
      // fix: log message typo "uploadLocaltion"
      console.debug(`key=${key}, uploadLocation=${uploadLocation}`);

      // chunks must be multiples of 256 KB (256 x 1024 bytes) in size
      const sizePerChunk = 5 * 4 * 256 * 1024; // 5.24 mb
      const chunkRanges = splitFileSizeToChunkRanges(
        content.byteLength,
        sizePerChunk
      );

      let entity: GDEntity | undefined = undefined;
      // TODO: deal with "Resume an interrupted upload"
      // currently (202405) only assume everything goes well...
      // TODO: parallel
      for (const { start, end } of chunkRanges) {
        console.debug(
          `key=${key}, start upload chunk ${start}-${end}/${content.byteLength}`
        );
        const res = await fetch(uploadLocation, {
          method: "PUT",
          headers: {
            Authorization: `Bearer ${await this._getAccessToken()}`,
            "Content-Length": `${end - start + 1}`, // the number of bytes in the current chunk
            "Content-Range": `bytes ${start}-${end}/${content.byteLength}`,
          },
          body: content.slice(start, end + 1), // TODO: slice() is a copy, may be we can optimize it
        });
        if (res.status >= 400 && res.status <= 599) {
          throw Error(
            `create resumable file ${key} failed! meta=${JSON.stringify(
              meta
            )}, header=${JSON.stringify(headers)}`
          );
        }
        if (res.status === 200 || res.status === 201) {
          // the final chunk response carries the full File resource
          const res2: File = await res.json();
          console.debug(
            `upload ${key} with ${JSON.stringify(meta)}, res2=${JSON.stringify(
              res2
            )}`
          );
          if (res2.id === undefined || res2.id === null || res2.id === "") {
            // TODO: what's this??
          } else {
            entity = fromFileToGDEntity(res2, parentID, parentFolderPath);
            // insert into cache
            this.keyToGDEntity[key] = entity;
          }
        }
      }

      if (entity === undefined) {
        throw Error(`something goes wrong while uploading large file ${key}`);
      }
      return entity;
    }
  }

  /**
   * Download a file's content. Requires the key to be in the cache.
   * https://developers.google.com/drive/api/reference/rest/v3/files/get
   */
  async readFile(key: string): Promise<ArrayBuffer> {
    if (key.endsWith("/")) {
      throw Error(`you should not call readFile on ${key}`);
    }
    await this._init();
    const fileID = this.keyToGDEntity[key]?.id;
    if (fileID === undefined) {
      throw Error(`no fileID found for key=${key}`);
    }
    const res1 = await fetch(
      `https://www.googleapis.com/drive/v3/files/${fileID}?alt=media`,
      {
        method: "GET",
        headers: {
          Authorization: `Bearer ${await this._getAccessToken()}`,
        },
      }
    );
    if (res1.status !== 200) {
      throw Error(`cannot download ${key} using fileID=${fileID}`);
    }
    const res2 = await res1.arrayBuffer();
    return res2;
  }

  async rename(key1: string, key2: string): Promise<void> {
    throw new Error("Method not implemented.");
  }

  /**
   * "Remove" a file by moving it to the trash (PATCH trashed=true), so the
   * user can still recover it from the Drive UI.
   * https://developers.google.com/drive/api/guides/delete
   * https://developers.google.com/drive/api/reference/rest/v3/files/update
   */
  async rm(key: string): Promise<void> {
    await this._init();
    const fileID = this.keyToGDEntity[key]?.id;
    if (fileID === undefined) {
      throw Error(`no fileID found for key=${key}`);
    }
    const res1 = await fetch(
      `https://www.googleapis.com/drive/v3/files/${fileID}`,
      {
        method: "PATCH",
        headers: {
          Authorization: `Bearer ${await this._getAccessToken()}`,
        },
        body: JSON.stringify({
          trashed: true,
        }),
      }
    );
    if (res1.status !== 200) {
      throw Error(`cannot rm ${key} using fileID=${fileID}`);
    }
  }

  async checkConnect(callbackFunc?: any): Promise<boolean> {
    // if we can init, we can connect
    try {
      await this._init();
      return true;
    } catch (err) {
      console.debug(err);
      callbackFunc?.(err);
      return false;
    }
  }

  async getUserDisplayName(): Promise<string> {
    throw new Error("Method not implemented.");
  }

  /**
   * Revoke the OAuth token on Google's side.
   * https://developers.google.com/identity/protocols/oauth2/web-server#tokenrevoke
   */
  async revokeAuth(): Promise<any> {
    // fix: _getAccessToken() returns a Promise and was interpolated without
    // await, so the url previously contained "[object Promise]" as the token
    const token = await this._getAccessToken();
    const x = await fetch(`https://oauth2.googleapis.com/revoke?token=${token}`, {
      method: "POST",
      headers: {
        "Content-Type": "application/x-www-form-urlencoded",
      },
    });
    if (x.status === 200) {
      return true;
    } else {
      throw Error(`cannot revoke`);
    }
  }

  allowEmptyFile(): boolean {
    return true;
  }
}

75
pro/src/langs/en.json Normal file
View File

@ -0,0 +1,75 @@
{
"settings_conflictaction_smart_conflict": "Smart Conflict (PRO) (beta)",
"settings_conflictaction_smart_conflict_desc": "<p><strong>!!It's a PRO feature! You need an online account for this feature!!</strong>(<a href=\"#settings-pro\">scroll down</a> for more info about PRO account.)</p><p><ul><li>For small markdown files, the plugin tries to merge them with diff3 algorithm.</li><li>For large files or not-markdown files, the plugin saves both files by renaming them.</li></ul></p><p><strong>Please manually backup your vault before using this feature!</strong></p>",
"protocol_pro_connecting": "Connecting",
"protocol_pro_connect_manualinput_succ": "You've connected",
"protocol_pro_connect_fail": "Something went wrong from response from Remotely Save official website. Maybe the network connection is not good. Maybe you rejected the auth?",
"protocol_pro_connect_succ_revoke": "You've connected as user {{email}}. If you want to disconnect, click this button.",
"modal_googledriveauth_tutorial": "<p>Please firstly go to the address, then go on the auth flow. In the end, you will see a code, please paste that code here and submit.</p>",
"modal_googledriveauth_copybutton": "Click to copy the auth url",
"modal_googledriveauth_copynotice": "The auth url is copied to the clipboard!",
"modal_googledrivce_maualinput": "The Code from the website",
"modal_googledrivce_maualinput_desc": "Please input the code here from the end of auth flow, and press confirm.",
"modal_googledrive_maualinput_notice": "We are trying to connect to Google and update the credentials...",
"modal_googledrive_maualinput_succ_notice": "Great! The credentials are updated!",
"modal_googledrive_maualinput_fail_notice": "Oops! Failed to update the credentials. Please try again later.",
"modal_googledriverevokeauth_step1": "Step 1: Go to the following address, you can remove the connection there.",
"modal_googledriverevokeauth_step2": "Step 2: Click the button below, to clean the locally-saved login credentials.",
"modal_googledriverevokeauth_clean": "Clean Locally-Saved Login Credentials",
"modal_googledriverevokeauth_clean_desc": "You need to click the button.",
"modal_googledriverevokeauth_clean_button": "Clean",
"modal_googledriverevokeauth_clean_notice": "Cleaned!",
"modal_googledriverevokeauth_clean_fail": "Something goes wrong while revoking.",
"modal_prorevokeauth": "Revoke auth by clicking here and follow the steps.",
"modal_prorevokeauth_clean": "Clean",
"modal_prorevokeauth_clean_desc": "Clean local auth record",
"modal_prorevokeauth_clean_button": "Clean",
"modal_prorevokeauth_clean_notice": "Local auth record is cleaned",
"modal_prorevokeauth_clean_fail": "Fail to clean local auth record.",
"modal_proauth_copybutton": "Click to copy the auth url",
"modal_proauth_copynotice": "The auth url is copied to the clipboard!",
"modal_proauth_maualinput": "The Code from the website",
"modal_proauth_maualinput_desc": "Please input the code here from the end of auth flow, and press confirm.",
"modal_proauth_maualinput_notice": "Trying to connect, wait...",
"modal_proauth_maualinput_conn_fail": "Failed to connect",
"settings_googledrive": "Google Drive (PRO) (beta)",
"settings_chooseservice_googledrive": "Google Drive (PRO) (beta)",
"settings_googledrive_disclaimer1": "Disclaimer: This app is NOT an official Google product. The app just uses Google Drive's public api.",
"settings_googledrive_disclaimer2": "Disclaimer: The information is stored locally. Other malicious/harmful/faulty plugins could read the info. If you see any unintentional access to your Google Drive, please immediately disconnect this app on https://myaccount.google.com/permissions .",
"settings_googledrive_pro_desc": "<p><strong>!!It's a PRO feature of Remotely Save! You need a Remotely Save online account for this feature!!</strong>(<a href=\"#settings-pro\">scroll down</a> for more info about PRO account.)</p>",
"settings_googledrive_notshowuphint": "Google Drive Settings Not Available",
"settings_googledrive_notshowuphint_desc": "Google Drive settings are not available, because you haven't subscribed to the PRO feature in your Remotely Save account.",
"settings_googledrive_notshowuphint_view_pro": "View PRO Settings",
"settings_googledrive_folder": "We will create and sync inside the folder {{remoteBaseDir}} on your Google Drive. DO NOT create this folder by yourself manually.",
"settings_googledrive_revoke": "Revoke Auth",
"settings_googledrive_revoke_desc": "You've connected. If you want to disconnect, click this button.",
"settings_googledrive_revoke_button": "Revoke Auth",
"settings_googledrive_auth": "Auth",
"settings_googledrive_auth_desc": "Auth.",
"settings_googledrive_auth_button": "Auth",
"settings_googledrive_connect_succ": "Great! We can connect to Google Drive!",
"settings_googledrive_connect_fail": "We cannot connect to Google Drive.",
"settings_export_googledrive_button": "Export Google Drive Part",
"settings_pro": "Account (for PRO features)",
"settings_pro_tutorial": "<p>Using <strong>basic</strong> features of Remotely Save is <strong>FREE</strong> and does <strong>NOT</strong> need an account.</p><p>However, you will <strong>need</strong> an online account and <strong>PAY</strong> for the <strong>PRO</strong> features such as smart conflict.</p><p>Firstly please click the button to sign up and sign in to the website: <a href=\"https://remotelysave.com\">https://remotelysave.com</a>. Notice: It's different from, and NOT affiliated with Obsidian account.</p><p>Secondly please \"connect\" your local device to your online account.</p>",
"settings_pro_features": "Features",
"settings_pro_features_desc": "Here are features you've enabled:<br/>{{{features}}}",
"settings_pro_features_refresh_button": "Check again",
"settings_pro_features_refresh_fetch": "Fetching...",
"settings_pro_features_refresh_succ": "Refreshed!",
"settings_pro_revoke": "Disconnect",
"settings_pro_revoke_desc": "You've connected as user {{email}}. If you want to disconnect, click this button.",
"settings_pro_revoke_button": "Disconnect",
"settings_pro_intro": "Remotely Save Online Account",
"settings_pro_intro_desc": "Click the button to jump to the website to sign up or sign in.",
"settings_pro_intro_button": "Sign Up / Sign In",
"settings_pro_auth": "Connect",
"settings_pro_auth_desc": "After you sign up and sign in the account on the website, you need to connect your plugin here to the online account. Please click the button to connect.",
"settings_pro_auth_button": "Connect"
}

9
pro/src/langs/index.ts Normal file
View File

@ -0,0 +1,9 @@
import en from "./en.json";
import zh_cn from "./zh_cn.json";
import zh_tw from "./zh_tw.json";
export const LANGS = {
en: en,
zh_cn: zh_cn,
zh_tw: zh_tw,
};

75
pro/src/langs/zh_cn.json Normal file
View File

@ -0,0 +1,75 @@
{
"settings_conflictaction_smart_conflict": "智能处理冲突 (PRO) (beta)",
"settings_conflictaction_smart_conflict_desc": "<p><strong>!!这是 PRO付费功能! 您需要在线账号来使用此功能!!</strong><a href=\"#settings-pro\">向下滑</a>可以看到 PRO 账号的更多信息。)</p><p><ul><li>小 markdown 文件,本插件尝试使用 diff3 算法合并它;</li><li>对于大文件或非 markdown 文件,本插件尝试改名字并均进行保存。</li></ul></p><p><strong>请注意先手动备份 vault 文件再用此功能!</strong></p>",
"protocol_pro_connecting": "正在连接",
"protocol_pro_connect_manualinput_succ": "连接成功",
"protocol_pro_connect_fail": "Remotely Save 官网返回错误。可能是网络连接不稳定。也可能是您拒绝了授权?",
"protocol_pro_connect_succ_revoke": "您已连接上账号 {{email}}。如果要取消连接,请点击此按钮。",
"modal_googledriveauth_tutorial": "<p>请访问此网址,然后会进入授权流程。最后,您会看到一个码,请复制粘贴到这里然后提交。</p>",
"modal_googledriveauth_copybutton": "点击以复制网址",
"modal_googledriveauth_copynotice": "网址已复制!",
"modal_googledrivce_maualinput": "网站上的码",
"modal_googledrivce_maualinput_desc": "请粘贴授权流程最后的那个码,然后点击确认。",
"modal_googledrive_maualinput_notice": "正在尝试连接 Google 并更新授权信息......",
"modal_googledrive_maualinput_succ_notice": "很好!授权信息已更新!",
"modal_googledrive_maualinput_fail_notice": "更新授权信息失败。请稍后重试。",
"modal_googledriverevokeauth_step1": "第 1 步:访问以下网址,可以删除连接。",
"modal_googledriverevokeauth_step2": "第 2 步:点击以下按钮,从而清理本地的登录信息。",
"modal_googledriverevokeauth_clean": "清理本地登录信息",
"modal_googledriverevokeauth_clean_desc": "您需要点击此按钮。",
"modal_googledriverevokeauth_clean_button": "清理",
"modal_googledriverevokeauth_clean_notice": "已清理!",
"modal_googledriverevokeauth_clean_fail": "清理授权时候发生了错误。",
"modal_prorevokeauth": "点击这里和按照步骤取消授权。",
"modal_prorevokeauth_clean": "清理",
"modal_prorevokeauth_clean_desc": "清理本地授权记录",
"modal_prorevokeauth_clean_button": "清理",
"modal_prorevokeauth_clean_notice": "清理本地授权记录完毕",
"modal_prorevokeauth_clean_fail": "清理本地授权记录出错。",
"modal_proauth_copybutton": "点击从而复制授权网址",
"modal_proauth_copynotice": "授权网址已复制!",
"modal_proauth_maualinput": "网站的授权码",
"modal_proauth_maualinput_desc": "请输入授权流程最后一步的授权码,然后点击确认。",
"modal_proauth_maualinput_notice": "正在连接,请稍候......",
"modal_proauth_maualinput_conn_fail": "连接失败",
"settings_googledrive": "Google Drive (PRO) (beta)",
"settings_chooseservice_googledrive": "Google Drive (PRO) (beta)",
"settings_googledrive_disclaimer1": "声明:本插件不是 Google 的官方产品。只是用到了它的公开 API。",
"settings_googledrive_disclaimer2": "声明:您所输入的信息存储于本地。其它有害的或者出错的插件,是有可能读取到这些信息的。如果您发现任何不符合预期的 Google Drive 访问,请立刻在以下网站操作断开连接: https://myaccount.google.com/permissions 。",
"settings_googledrive_pro_desc": "<p><strong>!!这是 PRO付费功能! 您需要在线账号来使用此功能!!</strong><a href=\"#settings-pro\">向下滑</a>可以看到 PRO 账号的更多信息。)</p>",
"settings_googledrive_notshowuphint": "Google Drive 设置不可用",
"settings_googledrive_notshowuphint_desc": "Google Drive 设置不可用,因为您没有在 Remotely Save 账号里开启这个 PRO 功能。",
"settings_googledrive_notshowuphint_view_pro": "查看 PRO 相关设置",
"settings_googledrive_folder": "我们会在 Google Drive 创建此文件夹并同步内容进去: {{remoteBaseDir}} 。请不要手动在网站上创建。",
"settings_googledrive_revoke": "撤回鉴权",
"settings_googledrive_revoke_desc": "您现在已连接。如果想取消连接,请点击此按钮。",
"settings_googledrive_revoke_button": "撤回鉴权",
"settings_googledrive_auth": "鉴权",
"settings_googledrive_auth_desc": "鉴权.",
"settings_googledrive_auth_button": "鉴权",
"settings_googledrive_connect_succ": "很好!我们可连接上 Google Drive",
"settings_googledrive_connect_fail": "我们未能连接上 Google Drive。",
"settings_export_googledrive_button": "导出 Google Drive 部分",
"settings_pro": "账号(PRO 付费功能)",
"settings_pro_tutorial": "<p>使用 Remotely Save 的<strong>基本</strong>功能是<strong>免费的</strong>,而且<strong>不</strong>需要注册对应账号。</p><p>但是,您<strong>需要</strong>注册账号和对<strong>PRO</strong>功能<strong>付费</strong>使用,如智能处理冲突功能。</p><p>第一步:点击按钮从而注册和登录网站:<a href=\"https://remotelysave.com\">https://remotelysave.com</a>。注意:这和 Obsidian 官方账号无关,是不同的账号。</p><p>第二步:点击“连接”按钮,从而连接本设备和在线账号。</p>",
"settings_pro_features": "功能",
"settings_pro_features_desc": "您开通了以下功能:<br/>{{{features}}}",
"settings_pro_features_refresh_button": "再次检查",
"settings_pro_features_refresh_fetch": "正在获取数据......",
"settings_pro_features_refresh_succ": "已刷新!",
"settings_pro_revoke": "断开连接",
"settings_pro_revoke_desc": "您已连接上账号 {{email}}。如果要取消连接,请点击此按钮。",
"settings_pro_revoke_button": "断开连接",
"settings_pro_intro": "Remotely Save 账号",
"settings_pro_intro_desc": "点击此按钮,从而到网站上注册和登录。",
"settings_pro_intro_button": "注册或登录",
"settings_pro_auth": "连接",
"settings_pro_auth_desc": "在网站上注册和登录后,您需要“连接”本设备和在线账号。请点击按钮开始连接。",
"settings_pro_auth_button": "连接"
}

75
pro/src/langs/zh_tw.json Normal file
View File

@ -0,0 +1,75 @@
{
"settings_conflictaction_smart_conflict": "智慧處理衝突 (PRO) (beta)",
"settings_conflictaction_smart_conflict_desc": "<p><strong>!!這是 PRO付費功能! 您需要線上賬號來使用此功能!!</strong><a href=\"#settings-pro\">向下滑</a>可以看到 PRO 賬號的更多資訊。)</p><p><ul><li>小 markdown 檔案,本外掛嘗試使用 diff3 演算法合併它;</li><li>對於大檔案或非 markdown 檔案,本外掛嘗試改名字並均進行儲存。</li></ul></p><p><strong>請注意先手動備份 vault 檔案再用此功能!</strong></p>",
"protocol_pro_connecting": "正在連線",
"protocol_pro_connect_manualinput_succ": "連線成功",
"protocol_pro_connect_fail": "Remotely Save 官網返回錯誤。可能是網路連線不穩定。也可能是您拒絕了授權?",
"protocol_pro_connect_succ_revoke": "您已連線上賬號 {{email}}。如果要取消連線,請點選此按鈕。",
"modal_googledriveauth_tutorial": "<p>請訪問此網址,然後會進入授權流程。最後,您會看到一個碼,請複製貼上到這裡然後提交。</p>",
"modal_googledriveauth_copybutton": "點選以複製網址",
"modal_googledriveauth_copynotice": "網址已複製!",
"modal_googledrivce_maualinput": "網站上的碼",
"modal_googledrivce_maualinput_desc": "請貼上授權流程最後的那個碼,然後點選確認。",
"modal_googledrive_maualinput_notice": "正在嘗試連線 Google 並更新授權資訊......",
"modal_googledrive_maualinput_succ_notice": "很好!授權資訊已更新!",
"modal_googledrive_maualinput_fail_notice": "更新授權資訊失敗。請稍後重試。",
"modal_googledriverevokeauth_step1": "第 1 步:訪問以下網址,可以刪除連線。",
"modal_googledriverevokeauth_step2": "第 2 步:點選以下按鈕,從而清理本地的登入資訊。",
"modal_googledriverevokeauth_clean": "清理本地登入資訊",
"modal_googledriverevokeauth_clean_desc": "您需要點選此按鈕。",
"modal_googledriverevokeauth_clean_button": "清理",
"modal_googledriverevokeauth_clean_notice": "已清理!",
"modal_googledriverevokeauth_clean_fail": "清理授權時候發生了錯誤。",
"modal_prorevokeauth": "點選這裡和按照步驟取消授權。",
"modal_prorevokeauth_clean": "清理",
"modal_prorevokeauth_clean_desc": "清理本地授權記錄",
"modal_prorevokeauth_clean_button": "清理",
"modal_prorevokeauth_clean_notice": "清理本地授權記錄完畢",
"modal_prorevokeauth_clean_fail": "清理本地授權記錄出錯。",
"modal_proauth_copybutton": "點選從而複製授權網址",
"modal_proauth_copynotice": "授權網址已複製!",
"modal_proauth_maualinput": "網站的授權碼",
"modal_proauth_maualinput_desc": "請輸入授權流程最後一步的授權碼,然後點選確認。",
"modal_proauth_maualinput_notice": "正在連線,請稍候......",
"modal_proauth_maualinput_conn_fail": "連線失敗",
"settings_googledrive": "Google Drive (PRO) (beta)",
"settings_chooseservice_googledrive": "Google Drive (PRO) (beta)",
"settings_googledrive_disclaimer1": "宣告:本外掛不是 Google 的官方產品。只是用到了它的公開 API。",
"settings_googledrive_disclaimer2": "宣告:您所輸入的資訊儲存於本地。其它有害的或者出錯的外掛,是有可能讀取到這些資訊的。如果您發現任何不符合預期的 Google Drive 訪問,請立刻在以下網站操作斷開連線: https://myaccount.google.com/permissions 。",
"settings_googledrive_pro_desc": "<p><strong>!!這是 PRO付費功能! 您需要線上賬號來使用此功能!!</strong><a href=\"#settings-pro\">向下滑</a>可以看到 PRO 賬號的更多資訊。)</p>",
"settings_googledrive_notshowuphint": "Google Drive 設定不可用",
"settings_googledrive_notshowuphint_desc": "Google Drive 設定不可用,因為您沒有在 Remotely Save 賬號裡開啟這個 PRO 功能。",
"settings_googledrive_notshowuphint_view_pro": "檢視 PRO 相關設定",
"settings_googledrive_folder": "我們會在 Google Drive 建立此資料夾並同步內容進去: {{remoteBaseDir}} 。請不要手動在網站上建立。",
"settings_googledrive_revoke": "撤回鑑權",
"settings_googledrive_revoke_desc": "您現在已連線。如果想取消連線,請點選此按鈕。",
"settings_googledrive_revoke_button": "撤回鑑權",
"settings_googledrive_auth": "鑑權",
"settings_googledrive_auth_desc": "鑑權.",
"settings_googledrive_auth_button": "鑑權",
"settings_googledrive_connect_succ": "很好!我們可連線上 Google Drive",
"settings_googledrive_connect_fail": "我們未能連線上 Google Drive。",
"settings_export_googledrive_button": "匯出 Google Drive 部分",
"settings_pro": "賬號(PRO 付費功能)",
"settings_pro_tutorial": "<p>使用 Remotely Save 的<strong>基本</strong>功能是<strong>免費的</strong>,而且<strong>不</strong>需要註冊對應賬號。</p><p>但是,您<strong>需要</strong>註冊賬號和對<strong>PRO</strong>功能<strong>付費</strong>使用,如智慧處理衝突功能。</p><p>第一步:點選按鈕從而註冊和登入網站:<a href=\"https://remotelysave.com\">https://remotelysave.com</a>。注意:這和 Obsidian 官方賬號無關,是不同的賬號。</p><p>第二步:點選“連線”按鈕,從而連線本裝置和線上賬號。</p>",
"settings_pro_features": "功能",
"settings_pro_features_desc": "您開通了以下功能:<br/>{{{features}}}",
"settings_pro_features_refresh_button": "再次檢查",
"settings_pro_features_refresh_fetch": "正在獲取資料......",
"settings_pro_features_refresh_succ": "已重新整理!",
"settings_pro_revoke": "斷開連線",
"settings_pro_revoke_desc": "您已連線上賬號 {{email}}。如果要取消連線,請點選此按鈕。",
"settings_pro_revoke_button": "斷開連線",
"settings_pro_intro": "Remotely Save 賬號",
"settings_pro_intro_desc": "點選此按鈕,從而到網站上註冊和登入。",
"settings_pro_intro_button": "註冊或登入",
"settings_pro_auth": "連線",
"settings_pro_auth_desc": "在網站上註冊和登入後,您需要“連線”本裝置和線上賬號。請點選按鈕開始連線。",
"settings_pro_auth_button": "連線"
}

47
pro/src/localdb.ts Normal file
View File

@ -0,0 +1,47 @@
import type { Entity } from "../../src/baseTypes";
import type { InternalDBs } from "../../src/localdb";
/**
 * Store the previous synced content of a file, keyed by vault, sync profile
 * and file key, so smart-conflict can later use it as a merge base.
 */
export const upsertFileContentHistoryByVaultAndProfile = async (
  db: InternalDBs,
  vaultRandomID: string,
  profileID: string,
  prevSync: Entity,
  prevContent: ArrayBuffer
) => {
  const fullKey = `${vaultRandomID}\t${profileID}\t${prevSync.key}`;
  await db.fileContentHistoryTbl.setItem(fullKey, prevContent);
};
/**
 * Fetch the previously stored content for a file in this vault/profile.
 * Returns null/undefined when nothing was recorded for that key.
 */
export const getFileContentHistoryByVaultAndProfile = async (
  db: InternalDBs,
  vaultRandomID: string,
  profileID: string,
  prevSync: Entity
) => {
  const fullKey = `${vaultRandomID}\t${profileID}\t${prevSync.key}`;
  const content = await db.fileContentHistoryTbl.getItem(fullKey);
  return content as ArrayBuffer | null | undefined;
};
/**
 * Delete the stored content history of one file for this vault/profile.
 * No-op semantics for a missing key are delegated to the underlying table.
 */
export const clearFileContentHistoryByVaultAndProfile = async (
  db: InternalDBs,
  vaultRandomID: string,
  profileID: string,
  key: string
) => {
  const fullKey = `${vaultRandomID}\t${profileID}\t${key}`;
  await db.fileContentHistoryTbl.removeItem(fullKey);
};
/**
 * Delete every content-history entry belonging to the given vault,
 * regardless of profile or file key.
 */
export const clearAllFileContentHistoryByVault = async (
  db: InternalDBs,
  vaultRandomID: string
) => {
  const prefix = `${vaultRandomID}\t`;
  const allKeys = await db.fileContentHistoryTbl.keys();
  const matched = allKeys.filter((k: string) => k.startsWith(prefix));
  await db.fileContentHistoryTbl.removeItems(matched);
};

View File

@ -0,0 +1,377 @@
import cloneDeep from "lodash/cloneDeep";
import { type App, Modal, Notice, Setting } from "obsidian";
import { getClient } from "../../src/fsGetter";
import type { TransItemType } from "../../src/i18n";
import type RemotelySavePlugin from "../../src/main";
import { stringToFragment } from "../../src/misc";
import { ChangeRemoteBaseDirModal } from "../../src/settings";
import {
DEFAULT_GOOGLEDRIVE_CONFIG,
sendRefreshTokenReq,
} from "./fsGoogleDrive";
/**
 * Modal that walks the user through Google Drive authorization.
 *
 * Shows the auth URL of the Remotely Save website, lets the user paste back
 * the code (used as a refresh token) produced at the end of the flow, then
 * exchanges it for an access token and persists everything into settings.
 */
class GoogleDriveAuthModal extends Modal {
  readonly plugin: RemotelySavePlugin;
  // container of the "Auth" setting; hidden once a refresh token exists
  readonly authDiv: HTMLDivElement;
  // container of the "Revoke Auth" setting; shown once a refresh token exists
  readonly revokeAuthDiv: HTMLDivElement;
  readonly revokeAuthSetting: Setting;
  // translation function
  readonly t: (x: TransItemType, vars?: any) => string;
  constructor(
    app: App,
    plugin: RemotelySavePlugin,
    authDiv: HTMLDivElement,
    revokeAuthDiv: HTMLDivElement,
    revokeAuthSetting: Setting,
    t: (x: TransItemType, vars?: any) => string
  ) {
    super(app);
    this.plugin = plugin;
    this.authDiv = authDiv;
    this.revokeAuthDiv = revokeAuthDiv;
    this.revokeAuthSetting = revokeAuthSetting;
    this.t = t;
  }
  async onOpen() {
    const { contentEl } = this;
    const t = this.t;
    // the website drives the OAuth flow and hands the user a code to paste back
    const authUrl = "https://remotelysave.com/auth/googledrive/start";
    const div2 = contentEl.createDiv();
    div2.createDiv({
      text: stringToFragment(t("modal_googledriveauth_tutorial")),
    });
    div2.createEl(
      "button",
      {
        text: t("modal_googledriveauth_copybutton"),
      },
      (el) => {
        el.onclick = async () => {
          await navigator.clipboard.writeText(authUrl);
          new Notice(t("modal_googledriveauth_copynotice"));
        };
      }
    );
    contentEl.createEl("p").createEl("a", {
      href: authUrl,
      text: authUrl,
    });
    // the pasted code doubles as the refresh token
    let refreshToken = "";
    new Setting(contentEl)
      .setName(t("modal_googledrivce_maualinput"))
      .setDesc(t("modal_googledrivce_maualinput_desc"))
      .addText((text) =>
        text
          .setPlaceholder("")
          .setValue("")
          .onChange((val) => {
            refreshToken = val.trim();
          })
      )
      .addButton(async (button) => {
        button.setButtonText(t("submit"));
        button.onClick(async () => {
          new Notice(t("modal_googledrive_maualinput_notice"));
          try {
            // the googledrive config may not exist before the first auth
            if (this.plugin.settings.googledrive === undefined) {
              this.plugin.settings.googledrive = cloneDeep(
                DEFAULT_GOOGLEDRIVE_CONFIG
              );
            }
            // write placeholder token values first so the config object is
            // complete even if the exchange below fails midway
            this.plugin.settings.googledrive.refreshToken = refreshToken;
            this.plugin.settings.googledrive.accessToken = "access";
            this.plugin.settings.googledrive.accessTokenExpiresAtTimeMs = 1;
            this.plugin.settings.googledrive.accessTokenExpiresInMs = 1;
            // TODO: abstraction leaking now, how to fix?
            const k = await sendRefreshTokenReq(refreshToken);
            const ts = Date.now();
            this.plugin.settings.googledrive.accessToken = k.access_token;
            this.plugin.settings.googledrive.accessTokenExpiresInMs =
              k.expires_in * 1000;
            // treat the token as expired 2 minutes early to avoid racing
            // its real deadline
            this.plugin.settings.googledrive.accessTokenExpiresAtTimeMs =
              ts + k.expires_in * 1000 - 60 * 2 * 1000;
            await this.plugin.saveSettings();
            // try to remove data in clipboard
            await navigator.clipboard.writeText("");
            new Notice(t("modal_googledrive_maualinput_succ_notice"));
          } catch (e) {
            console.error(e);
            new Notice(t("modal_googledrive_maualinput_fail_notice"));
          } finally {
            // sync button visibility with whether we now hold a refresh token
            this.authDiv.toggleClass(
              "googledrive-auth-button-hide",
              this.plugin.settings.googledrive.refreshToken !== ""
            );
            this.revokeAuthDiv.toggleClass(
              "googledrive-revoke-auth-button-hide",
              this.plugin.settings.googledrive.refreshToken === ""
            );
            this.close();
          }
        });
      });
  }
  onClose() {
    const { contentEl } = this;
    contentEl.empty();
  }
}
/**
 * Modal for revoking Google Drive authorization: step 1 sends the user to
 * Google's permissions page, step 2 clears the locally stored credentials.
 */
class GoogleDriveRevokeAuthModal extends Modal {
  readonly plugin: RemotelySavePlugin;
  readonly authDiv: HTMLDivElement;
  readonly revokeAuthDiv: HTMLDivElement;
  readonly t: (x: TransItemType, vars?: any) => string;
  constructor(
    app: App,
    plugin: RemotelySavePlugin,
    authDiv: HTMLDivElement,
    revokeAuthDiv: HTMLDivElement,
    t: (x: TransItemType, vars?: any) => string
  ) {
    super(app);
    this.plugin = plugin;
    this.authDiv = authDiv;
    this.revokeAuthDiv = revokeAuthDiv;
    this.t = t;
  }
  async onOpen() {
    const t = this.t;
    const { contentEl } = this;
    // step 1: link to Google's own permission management page
    contentEl.createEl("p", {
      text: t("modal_googledriverevokeauth_step1"),
    });
    const consentUrl = "https://myaccount.google.com/permissions";
    const linkHolder = contentEl.createEl("p");
    linkHolder.createEl("a", {
      href: consentUrl,
      text: consentUrl,
    });
    // step 2: wipe the locally saved credentials
    contentEl.createEl("p", {
      text: t("modal_googledriverevokeauth_step2"),
    });
    const cleanSetting = new Setting(contentEl);
    cleanSetting.setName(t("modal_googledriverevokeauth_clean"));
    cleanSetting.setDesc(t("modal_googledriverevokeauth_clean_desc"));
    cleanSetting.addButton(async (button) => {
      button.setButtonText(t("modal_googledriverevokeauth_clean_button"));
      button.onClick(async () => {
        try {
          // reset the whole Google Drive config back to defaults
          this.plugin.settings.googledrive = cloneDeep(
            DEFAULT_GOOGLEDRIVE_CONFIG
          );
          await this.plugin.saveSettings();
          // after the reset there is no refresh token, so show the auth
          // button again and hide the revoke button
          const hasToken =
            this.plugin.settings.googledrive.refreshToken !== "";
          this.authDiv.toggleClass("googledrive-auth-button-hide", hasToken);
          this.revokeAuthDiv.toggleClass(
            "googledrive-revoke-auth-button-hide",
            !hasToken
          );
          new Notice(t("modal_googledriverevokeauth_clean_notice"));
          this.close();
        } catch (err) {
          console.error(err);
          new Notice(t("modal_googledriverevokeauth_clean_fail"));
        }
      });
    });
  }
  onClose() {
    const { contentEl } = this;
    contentEl.empty();
  }
}
/**
 * Build the Google Drive section of the settings page.
 *
 * The section is hidden unless the selected service is "googledrive", and the
 * actual controls are only usable when the PRO feature
 * "feature-google_drive" is enabled on the connected account.
 *
 * Fix: `plugin.settings.googledrive` is accessed defensively with optional
 * chaining — GoogleDriveAuthModal shows that this config can be `undefined`
 * before the first authorization, so bare property access could throw a
 * TypeError when rendering settings.
 *
 * @param containerEl parent element to append the section to
 * @param t translation function
 * @param app Obsidian app instance
 * @param plugin plugin instance (settings are read and mutated)
 * @param saveUpdatedConfigFunc settings-persist callback (unused here; kept
 *   for signature compatibility with the other settings builders)
 * @returns elements the PRO section needs in order to toggle this section
 */
export const generateGoogleDriveSettingsPart = (
  containerEl: HTMLElement,
  t: (x: TransItemType, vars?: any) => string,
  app: App,
  plugin: RemotelySavePlugin,
  saveUpdatedConfigFunc: () => Promise<any> | undefined
) => {
  const googleDriveDiv = containerEl.createEl("div", {
    cls: "googledrive-hide",
  });
  // only visible when Google Drive is the currently selected service
  googleDriveDiv.toggleClass(
    "googledrive-hide",
    plugin.settings.serviceType !== "googledrive"
  );
  googleDriveDiv.createEl("h2", { text: t("settings_googledrive") });
  const googleDriveLongDescDiv = googleDriveDiv.createEl("div", {
    cls: "settings-long-desc",
  });
  for (const c of [
    t("settings_googledrive_disclaimer1"),
    t("settings_googledrive_disclaimer2"),
  ]) {
    googleDriveLongDescDiv.createEl("p", {
      text: c,
      cls: "googledrive-disclaimer",
    });
  }
  googleDriveLongDescDiv.createEl("p", {
    text: t("settings_googledrive_folder", {
      // config may not exist before the first auth; fall back to vault name
      remoteBaseDir:
        plugin.settings.googledrive?.remoteBaseDir || app.vault.getName(),
    }),
  });
  googleDriveLongDescDiv.createDiv({
    text: stringToFragment(t("settings_googledrive_pro_desc")),
    cls: "googledrive-disclaimer",
  });
  // hint shown when the PRO feature is NOT enabled, linking to the PRO section
  const googleDriveNotShowUpHintSetting = new Setting(googleDriveDiv)
    .setName(t("settings_googledrive_notshowuphint"))
    .setDesc(t("settings_googledrive_notshowuphint_desc"))
    .addButton(async (button) => {
      button.setButtonText(t("settings_googledrive_notshowuphint_view_pro"));
      button.onClick(async () => {
        window.location.href = "#settings-pro";
      });
    });
  const googleDriveAllowedToUsedDiv = googleDriveDiv.createDiv();
  // if pro enabled, show up; otherwise hide.
  const allowGoogleDrive =
    plugin.settings.pro?.enabledProFeatures.filter(
      (x) => x.featureName === "feature-google_drive"
    ).length === 1;
  console.debug(`allow to show up google drive settings? ${allowGoogleDrive}`);
  if (allowGoogleDrive) {
    googleDriveAllowedToUsedDiv.removeClass("googledrive-allow-to-use-hide");
    googleDriveNotShowUpHintSetting.settingEl.addClass(
      "googledrive-allow-to-use-hide"
    );
  } else {
    googleDriveAllowedToUsedDiv.addClass("googledrive-allow-to-use-hide");
    googleDriveNotShowUpHintSetting.settingEl.removeClass(
      "googledrive-allow-to-use-hide"
    );
  }
  const googleDriveSelectAuthDiv = googleDriveAllowedToUsedDiv.createDiv();
  const googleDriveAuthDiv = googleDriveSelectAuthDiv.createDiv({
    cls: "googledrive-auth-button-hide settings-auth-related",
  });
  const googleDriveRevokeAuthDiv = googleDriveSelectAuthDiv.createDiv({
    cls: "googledrive-revoke-auth-button-hide settings-auth-related",
  });
  const googleDriveRevokeAuthSetting = new Setting(googleDriveRevokeAuthDiv)
    .setName(t("settings_googledrive_revoke"))
    .setDesc(t("settings_googledrive_revoke_desc"))
    .addButton(async (button) => {
      button.setButtonText(t("settings_googledrive_revoke_button"));
      button.onClick(async () => {
        new GoogleDriveRevokeAuthModal(
          app,
          plugin,
          googleDriveAuthDiv,
          googleDriveRevokeAuthDiv,
          t
        ).open();
      });
    });
  new Setting(googleDriveAuthDiv)
    .setName(t("settings_googledrive_auth"))
    .setDesc(t("settings_googledrive_auth_desc"))
    .addButton(async (button) => {
      button.setButtonText(t("settings_googledrive_auth_button"));
      button.onClick(async () => {
        const modal = new GoogleDriveAuthModal(
          app,
          plugin,
          googleDriveAuthDiv,
          googleDriveRevokeAuthDiv,
          googleDriveRevokeAuthSetting,
          t
        );
        // stash references so later auth-completion code can update the UI
        plugin.oauth2Info.helperModal = modal;
        plugin.oauth2Info.authDiv = googleDriveAuthDiv;
        plugin.oauth2Info.revokeDiv = googleDriveRevokeAuthDiv;
        plugin.oauth2Info.revokeAuthSetting = googleDriveRevokeAuthSetting;
        modal.open();
      });
    });
  // initial visibility: "auth" button only when not yet authed, and vice
  // versa; guard against the googledrive config being absent
  const googleDriveHasRefreshToken =
    (plugin.settings.googledrive?.refreshToken ?? "") !== "";
  googleDriveAuthDiv.toggleClass(
    "googledrive-auth-button-hide",
    googleDriveHasRefreshToken
  );
  googleDriveRevokeAuthDiv.toggleClass(
    "googledrive-revoke-auth-button-hide",
    !googleDriveHasRefreshToken
  );
  let newgoogleDriveRemoteBaseDir =
    plugin.settings.googledrive?.remoteBaseDir || "";
  new Setting(googleDriveAllowedToUsedDiv)
    .setName(t("settings_remotebasedir"))
    .setDesc(t("settings_remotebasedir_desc"))
    .addText((text) =>
      text
        .setPlaceholder(app.vault.getName())
        .setValue(newgoogleDriveRemoteBaseDir)
        .onChange((value) => {
          newgoogleDriveRemoteBaseDir = value.trim();
        })
    )
    .addButton((button) => {
      button.setButtonText(t("confirm"));
      button.onClick(() => {
        new ChangeRemoteBaseDirModal(
          app,
          plugin,
          newgoogleDriveRemoteBaseDir,
          "googledrive"
        ).open();
      });
    });
  new Setting(googleDriveAllowedToUsedDiv)
    .setName(t("settings_checkonnectivity"))
    .setDesc(t("settings_checkonnectivity_desc"))
    .addButton(async (button) => {
      button.setButtonText(t("settings_checkonnectivity_button"));
      button.onClick(async () => {
        new Notice(t("settings_checkonnectivity_checking"));
        const client = getClient(plugin.settings, app.vault.getName(), () =>
          plugin.saveSettings()
        );
        const errors = { msg: "" };
        const res = await client.checkConnect((err: any) => {
          errors.msg = `${err}`;
        });
        if (res) {
          new Notice(t("settings_googledrive_connect_succ"));
        } else {
          new Notice(t("settings_googledrive_connect_fail"));
          new Notice(errors.msg);
        }
      });
    });
  return {
    googleDriveDiv: googleDriveDiv,
    googleDriveAllowedToUsedDiv: googleDriveAllowedToUsedDiv,
    googleDriveNotShowUpHintSetting: googleDriveNotShowUpHintSetting,
  };
};

386
pro/src/settingsPro.ts Normal file
View File

@ -0,0 +1,386 @@
import cloneDeep from "lodash/cloneDeep";
import { type App, Modal, Notice, Setting } from "obsidian";
import { features } from "process";
import type { TransItemType } from "../../src/i18n";
import type RemotelySavePlugin from "../../src/main";
import { stringToFragment } from "../../src/misc";
import {
DEFAULT_PRO_CONFIG,
generateAuthUrlAndCodeVerifierChallenge,
getAndSaveProEmail,
getAndSaveProFeatures,
sendAuthReq,
setConfigBySuccessfullAuthInplace,
} from "./account";
import {
type FeatureInfo,
PRO_CLIENT_ID,
type ProConfig,
} from "./baseTypesPro";
/**
 * Modal that connects the plugin to a Remotely Save online (PRO) account.
 *
 * Generates a PKCE verifier/challenge plus auth URL, lets the user paste
 * back the auth code from the website, exchanges it for tokens, then fetches
 * and persists the enabled PRO features and the account email.
 */
export class ProAuthModal extends Modal {
  readonly plugin: RemotelySavePlugin;
  // container of the "Connect" settings; hidden once connected
  readonly authDiv: HTMLDivElement;
  // container of the "Disconnect" settings; shown once connected
  readonly revokeAuthDiv: HTMLDivElement;
  readonly revokeAuthSetting: Setting;
  // the features list in the settings page, refreshed after a successful auth
  readonly proFeaturesListSetting: Setting;
  // translation function
  readonly t: (x: TransItemType, vars?: any) => string;
  constructor(
    app: App,
    plugin: RemotelySavePlugin,
    authDiv: HTMLDivElement,
    revokeAuthDiv: HTMLDivElement,
    revokeAuthSetting: Setting,
    proFeaturesListSetting: Setting,
    t: (x: TransItemType, vars?: any) => string
  ) {
    super(app);
    this.plugin = plugin;
    this.authDiv = authDiv;
    this.revokeAuthDiv = revokeAuthDiv;
    this.revokeAuthSetting = revokeAuthSetting;
    this.proFeaturesListSetting = proFeaturesListSetting;
    this.t = t;
  }
  async onOpen() {
    const { contentEl } = this;
    const { authUrl, codeVerifier, codeChallenge } =
      await generateAuthUrlAndCodeVerifierChallenge(false);
    // keep the PKCE verifier around for the obsidian:// callback path too
    this.plugin.oauth2Info.verifier = codeVerifier;
    const t = this.t;
    const div2 = contentEl.createDiv();
    div2.createEl(
      "button",
      {
        text: t("modal_proauth_copybutton"),
      },
      (el) => {
        el.onclick = async () => {
          await navigator.clipboard.writeText(authUrl);
          new Notice(t("modal_proauth_copynotice"));
        };
      }
    );
    contentEl.createEl("p").createEl("a", {
      href: authUrl,
      text: authUrl,
    });
    // manual paste
    let authCode = "";
    new Setting(contentEl)
      .setName(t("modal_proauth_maualinput"))
      .setDesc(t("modal_proauth_maualinput_desc"))
      .addText((text) =>
        text
          .setPlaceholder("")
          .setValue("")
          .onChange((val) => {
            authCode = val.trim();
          })
      )
      .addButton(async (button) => {
        button.setButtonText(t("submit"));
        button.onClick(async () => {
          new Notice(t("modal_proauth_maualinput_notice"));
          try {
            // exchange the pasted code (plus PKCE verifier) for tokens;
            // the error callback notifies the user then rethrows into the
            // outer catch
            const authRes = await sendAuthReq(
              codeVerifier ?? "verifier",
              authCode,
              async (e: any) => {
                new Notice(t("protocol_pro_connect_fail"));
                new Notice(`${e}`);
                throw e;
              }
            );
            console.debug(authRes);
            const self = this;
            setConfigBySuccessfullAuthInplace(
              this.plugin.settings.pro!,
              authRes!,
              () => self.plugin.saveSettings()
            );
            // fetch the enabled PRO features and refresh the settings UI
            await getAndSaveProFeatures(
              this.plugin.settings.pro!,
              this.plugin.manifest.version,
              () => self.plugin.saveSettings()
            );
            this.proFeaturesListSetting.setDesc(
              stringToFragment(
                t("settings_pro_features_desc", {
                  features: featureListToText(
                    this.plugin.settings.pro!.enabledProFeatures
                  ),
                })
              )
            );
            await getAndSaveProEmail(
              this.plugin.settings.pro!,
              this.plugin.manifest.version,
              () => self.plugin.saveSettings()
            );
            new Notice(
              t("protocol_pro_connect_manualinput_succ", {
                email: this.plugin.settings.pro!.email ?? "(no email)",
              })
            );
            this.plugin.oauth2Info.verifier = ""; // reset it
            // flip the connect/disconnect UI, then drop the stashed
            // references so they are not reused by a later flow
            this.plugin.oauth2Info.authDiv?.toggleClass(
              "pro-auth-button-hide",
              this.plugin.settings.pro?.refreshToken !== ""
            );
            this.plugin.oauth2Info.authDiv = undefined;
            this.plugin.oauth2Info.revokeAuthSetting?.setDesc(
              t("protocol_pro_connect_succ_revoke", {
                email: this.plugin.settings.pro?.email,
              })
            );
            this.plugin.oauth2Info.revokeAuthSetting = undefined;
            this.plugin.oauth2Info.revokeDiv?.toggleClass(
              "pro-revoke-auth-button-hide",
              this.plugin.settings.pro?.email === ""
            );
            this.plugin.oauth2Info.revokeDiv = undefined;
            // try to remove data in clipboard
            await navigator.clipboard.writeText("");
            this.close();
          } catch (err) {
            console.error(err);
            new Notice(t("modal_proauth_maualinput_conn_fail"));
          }
        });
      });
  }
  onClose() {
    const { contentEl } = this;
    contentEl.empty();
  }
}
/**
 * Modal that disconnects the plugin from the Remotely Save online account
 * by resetting the local PRO config back to its defaults.
 */
export class ProRevokeAuthModal extends Modal {
  readonly plugin: RemotelySavePlugin;
  readonly authDiv: HTMLDivElement;
  readonly revokeAuthDiv: HTMLDivElement;
  readonly t: (x: TransItemType, vars?: any) => string;
  constructor(
    app: App,
    plugin: RemotelySavePlugin,
    authDiv: HTMLDivElement,
    revokeAuthDiv: HTMLDivElement,
    t: (x: TransItemType, vars?: any) => string
  ) {
    super(app);
    this.plugin = plugin;
    this.authDiv = authDiv;
    this.revokeAuthDiv = revokeAuthDiv;
    this.t = t;
  }
  async onOpen() {
    const t = this.t;
    const { contentEl } = this;
    contentEl.createEl("p", {
      text: t("modal_prorevokeauth"),
    });
    const cleanSetting = new Setting(contentEl);
    cleanSetting.setName(t("modal_prorevokeauth_clean"));
    cleanSetting.setDesc(t("modal_prorevokeauth_clean_desc"));
    cleanSetting.addButton(async (button) => {
      button.setButtonText(t("modal_prorevokeauth_clean_button"));
      button.onClick(async () => {
        try {
          // wipe the local account credentials back to defaults
          this.plugin.settings.pro = cloneDeep(DEFAULT_PRO_CONFIG);
          await this.plugin.saveSettings();
          // no refresh token any more, so show "connect" / hide "disconnect"
          const stillHasToken = this.plugin.settings.pro?.refreshToken !== "";
          this.authDiv.toggleClass("pro-auth-button-hide", stillHasToken);
          this.revokeAuthDiv.toggleClass(
            "pro-revoke-auth-button-hide",
            !stillHasToken
          );
          new Notice(t("modal_prorevokeauth_clean_notice"));
          this.close();
        } catch (err) {
          console.error(err);
          new Notice(t("modal_prorevokeauth_clean_fail"));
        }
      });
    });
  }
  onClose() {
    const { contentEl } = this;
    contentEl.empty();
  }
}
// Render the enabled PRO feature list as an HTML snippet: one feature per
// line (joined with "<br/>"), each with its expiry time in ISO-8601 format.
// TODO: i18n
const featureListToText = (features: FeatureInfo[]) => {
  if (features === undefined || features.length === 0) {
    return "No features enabled.";
  }
  const rendered: string[] = [];
  for (const feat of features) {
    const expireAt = new Date(Number(feat.expireAtTimeMs)).toISOString();
    rendered.push(`${feat.featureName} (expire: ${expireAt})`);
  }
  return rendered.join("<br/>");
};
/**
 * Build the PRO-account section of the settings page: sign-up/sign-in link,
 * "connect" button, the enabled-features list with a refresh button, and the
 * disconnect button. Refreshing the features also toggles the Google Drive
 * section's visibility, which is why those two Google Drive elements are
 * passed in.
 *
 * NOTE(review): `plugin.settings.pro!` non-null assertions are used below;
 * presumably `pro` is always initialized before settings render — confirm,
 * since ProRevokeAuthModal resets it to defaults rather than deleting it.
 */
export const generateProSettingsPart = (
  proDiv: HTMLDivElement,
  t: (x: TransItemType, vars?: any) => string,
  app: App,
  plugin: RemotelySavePlugin,
  saveUpdatedConfigFunc: () => Promise<any> | undefined,
  googleDriveAllowedToUsedDiv: HTMLDivElement,
  googleDriveNotShowUpHintSetting: Setting
) => {
  // the id is the anchor target of "#settings-pro" links used elsewhere
  proDiv
    .createEl("h2", { text: t("settings_pro") })
    .setAttribute("id", "settings-pro");
  proDiv.createEl("div", {
    text: stringToFragment(t("settings_pro_tutorial")),
  });
  const proSelectAuthDiv = proDiv.createDiv();
  // shown while NOT connected
  const proAuthDiv = proSelectAuthDiv.createDiv({
    cls: "pro-auth-button-hide settings-auth-related",
  });
  // shown while connected
  const proRevokeAuthDiv = proSelectAuthDiv.createDiv({
    cls: "pro-revoke-auth-button-hide settings-auth-related",
  });
  const proFeaturesListSetting = new Setting(proRevokeAuthDiv)
    .setName(t("settings_pro_features"))
    .setDesc(
      stringToFragment(
        t("settings_pro_features_desc", {
          features: featureListToText(plugin.settings.pro!.enabledProFeatures),
        })
      )
    );
  proFeaturesListSetting.addButton(async (button) => {
    button.setButtonText(t("settings_pro_features_refresh_button"));
    button.onClick(async () => {
      new Notice(t("settings_pro_features_refresh_fetch"));
      // re-fetch the enabled features from the server and persist them
      await getAndSaveProFeatures(
        plugin.settings.pro!,
        plugin.manifest.version,
        saveUpdatedConfigFunc
      );
      proFeaturesListSetting.setDesc(
        stringToFragment(
          t("settings_pro_features_desc", {
            features: featureListToText(
              plugin.settings.pro!.enabledProFeatures
            ),
          })
        )
      );
      // mirror the visibility logic in generateGoogleDriveSettingsPart:
      // Google Drive settings only show up when its feature is enabled
      const allowGoogleDrive =
        plugin.settings.pro?.enabledProFeatures.filter(
          (x) => x.featureName === "feature-google_drive"
        ).length === 1;
      console.debug(
        `allow to show up google drive settings? ${allowGoogleDrive}`
      );
      if (allowGoogleDrive) {
        googleDriveAllowedToUsedDiv.removeClass(
          "googledrive-allow-to-use-hide"
        );
        googleDriveNotShowUpHintSetting.settingEl.addClass(
          "googledrive-allow-to-use-hide"
        );
      } else {
        googleDriveAllowedToUsedDiv.addClass("googledrive-allow-to-use-hide");
        googleDriveNotShowUpHintSetting.settingEl.removeClass(
          "googledrive-allow-to-use-hide"
        );
      }
      new Notice(t("settings_pro_features_refresh_succ"));
    });
  });
  const proRevokeAuthSetting = new Setting(proRevokeAuthDiv)
    .setName(t("settings_pro_revoke"))
    .setDesc(
      t("settings_pro_revoke_desc", {
        email: plugin.settings.pro?.email,
      })
    )
    .addButton(async (button) => {
      button.setButtonText(t("settings_pro_revoke_button"));
      button.onClick(async () => {
        new ProRevokeAuthModal(
          app,
          plugin,
          proAuthDiv,
          proRevokeAuthDiv,
          t
        ).open();
      });
    });
  new Setting(proAuthDiv)
    .setName(t("settings_pro_intro"))
    .setDesc(stringToFragment(t("settings_pro_intro_desc")))
    .addButton(async (button) => {
      button.setButtonText(t("settings_pro_intro_button"));
      button.onClick(async () => {
        window.open("https://remotelysave.com/user/signupin", "_self");
      });
    });
  new Setting(proAuthDiv)
    .setName(t("settings_pro_auth"))
    .setDesc(t("settings_pro_auth_desc"))
    .addButton(async (button) => {
      button.setButtonText(t("settings_pro_auth_button"));
      button.onClick(async () => {
        const modal = new ProAuthModal(
          app,
          plugin,
          proAuthDiv,
          proRevokeAuthDiv,
          proRevokeAuthSetting,
          proFeaturesListSetting,
          t
        );
        // stash references used later when the auth flow completes
        // (ProAuthModal.onOpen reads and then clears them)
        plugin.oauth2Info.helperModal = modal;
        plugin.oauth2Info.authDiv = proAuthDiv;
        plugin.oauth2Info.revokeDiv = proRevokeAuthDiv;
        plugin.oauth2Info.revokeAuthSetting = proRevokeAuthSetting;
        modal.open();
      });
    });
  // initial visibility: show "connect" when no refresh token, else "disconnect"
  proAuthDiv.toggleClass(
    "pro-auth-button-hide",
    plugin.settings.pro?.refreshToken !== ""
  );
  proRevokeAuthDiv.toggleClass(
    "pro-revoke-auth-button-hide",
    plugin.settings.pro?.refreshToken === ""
  );
};

View File

@ -0,0 +1,68 @@
import { deepStrictEqual, rejects, throws } from "assert";
import { getFileRename } from "../src/conflictLogic";
// Tests for getFileRename: generating a non-conflicting name for a file by
// inserting ".dup" before the last extension (or appending ".dup" when the
// file has no extension).
describe("New name is generated", () => {
  it("should throw for empty file", async () => {
    // degenerate paths that do not name a file
    for (const key of ["", "/", ".", ".."]) {
      throws(() => getFileRename(key));
    }
  });
  it("should throw for folder", async () => {
    // a trailing slash denotes a folder, which cannot be renamed this way
    for (const key of ["sss/", "ssss/yyy/"]) {
      throws(() => getFileRename(key));
    }
  });
  it("should correctly get no ext files renamed", async () => {
    deepStrictEqual(getFileRename("abc"), "abc.dup");
    deepStrictEqual(getFileRename("xxxx/yyyy/abc"), "xxxx/yyyy/abc.dup");
  });
  it("should correctly get dot files renamed", async () => {
    // a leading dot is part of the name, not an extension; a trailing dot
    // counts as no extension
    deepStrictEqual(getFileRename(".abc"), ".abc.dup");
    deepStrictEqual(getFileRename("xxxx/yyyy/.efg"), "xxxx/yyyy/.efg.dup");
    deepStrictEqual(getFileRename("xxxx/yyyy/hij."), "xxxx/yyyy/hij.dup");
  });
  it("should correctly get normal files renamed", async () => {
    deepStrictEqual(getFileRename("abc.efg"), "abc.dup.efg");
    deepStrictEqual(
      getFileRename("xxxx/yyyy/abc.efg"),
      "xxxx/yyyy/abc.dup.efg"
    );
    // only the LAST extension is considered (tar.gz -> tar.dup.gz)
    deepStrictEqual(
      getFileRename("xxxx/yyyy/abc.tar.gz"),
      "xxxx/yyyy/abc.tar.dup.gz"
    );
    deepStrictEqual(
      getFileRename("xxxx/yyyy/.abc.efg"),
      "xxxx/yyyy/.abc.dup.efg"
    );
  });
  it("should correctly get duplicated files renamed again", async () => {
    // renaming an already-renamed file stacks another ".dup"
    deepStrictEqual(getFileRename("abc.dup"), "abc.dup.dup");
    deepStrictEqual(
      getFileRename("xxxx/yyyy/.abc.dup"),
      "xxxx/yyyy/.abc.dup.dup"
    );
    deepStrictEqual(
      getFileRename("xxxx/yyyy/abc.dup.md"),
      "xxxx/yyyy/abc.dup.dup.md"
    );
    deepStrictEqual(
      getFileRename("xxxx/yyyy/.abc.dup.md"),
      "xxxx/yyyy/.abc.dup.dup.md"
    );
  });
});

201
src/LICENSE Normal file
View File

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

9
src/README.md Normal file
View File

@ -0,0 +1,9 @@
# Main Basic Source
## What?
The main basic source code for Remotely Save.
## License
The code, files, and subfolders inside the current folder (`src` in the repo) are released under the open-source license "Apache License, version 2.0".

View File

@ -3,17 +3,25 @@
* To avoid circular dependency.
*/
import { Platform, requireApiVersion } from "obsidian";
import type { LangType, LangTypeAndAuto } from "./i18n";
import type { GoogleDriveConfig, ProConfig } from "../pro/src/baseTypesPro";
import type { LangTypeAndAuto } from "./i18n";
export const DEFAULT_CONTENT_TYPE = "application/octet-stream";
export type SUPPORTED_SERVICES_TYPE = "s3" | "webdav" | "dropbox" | "onedrive";
export type SUPPORTED_SERVICES_TYPE =
| "s3"
| "webdav"
| "dropbox"
| "onedrive"
| "webdis"
| "googledrive";
export type SUPPORTED_SERVICES_TYPE_WITH_REMOTE_BASE_DIR =
| "webdav"
| "dropbox"
| "onedrive";
| "onedrive"
| "webdis"
| "googledrive";
export interface S3Config {
s3Endpoint: string;
@ -84,6 +92,14 @@ export interface OnedriveConfig {
username: string;
credentialsShouldBeDeletedAtTime?: number;
remoteBaseDir?: string;
emptyFile: "skip" | "error";
}
export interface WebdisConfig {
address: string;
username?: string;
password?: string;
remoteBaseDir?: string;
}
export type SyncDirectionType =
@ -93,13 +109,27 @@ export type SyncDirectionType =
export type CipherMethodType = "rclone-base64" | "openssl-base64" | "unknown";
export type QRExportType = "all_but_oauth2" | "dropbox" | "onedrive";
export type QRExportType =
| "basic_and_advanced"
| "s3"
| "dropbox"
| "onedrive"
| "webdav"
| "webdis"
| "googledrive";
export interface ProfilerConfig {
enablePrinting?: boolean;
recordSize?: boolean;
}
export interface RemotelySavePluginSettings {
s3: S3Config;
webdav: WebdavConfig;
dropbox: DropboxConfig;
onedrive: OnedriveConfig;
webdis: WebdisConfig;
googledrive: GoogleDriveConfig;
password: string;
serviceType: SUPPORTED_SERVICES_TYPE;
currLogLevel?: string;
@ -128,6 +158,10 @@ export interface RemotelySavePluginSettings {
encryptionMethod?: CipherMethodType;
profiler?: ProfilerConfig;
pro?: ProConfig;
/**
* @deprecated
*/
@ -161,7 +195,10 @@ export const OAUTH2_FORCE_EXPIRE_MILLISECONDS = 1000 * 60 * 60 * 24 * 80;
export type EmptyFolderCleanType = "skip" | "clean_both";
export type ConflictActionType = "keep_newer" | "keep_larger" | "rename_both";
export type ConflictActionType =
| "keep_newer"
| "keep_larger"
| "smart_conflict";
export type DecisionTypeForMixedEntity =
| "only_history"
@ -176,11 +213,11 @@ export type DecisionTypeForMixedEntity =
| "remote_is_deleted_thus_also_delete_local"
| "conflict_created_then_keep_local"
| "conflict_created_then_keep_remote"
| "conflict_created_then_keep_both"
| "conflict_created_then_smart_conflict"
| "conflict_created_then_do_nothing"
| "conflict_modified_then_keep_local"
| "conflict_modified_then_keep_remote"
| "conflict_modified_then_keep_both"
| "conflict_modified_then_smart_conflict"
| "folder_existed_both_then_do_nothing"
| "folder_existed_local_then_also_create_remote"
| "folder_existed_remote_then_also_create_local"
@ -210,6 +247,7 @@ export interface Entity {
hash?: string;
etag?: string;
synthesizedFolder?: boolean;
synthesizedFile?: boolean;
}
export interface UploadedType {
@ -230,6 +268,8 @@ export interface MixedEntity {
decision?: DecisionTypeForMixedEntity;
conflictAction?: ConflictActionType;
change?: boolean;
sideNotes?: any;
}
@ -261,15 +301,6 @@ export interface FileOrFolderMixedState {
deltimeRemoteFmt?: string;
}
export const API_VER_STAT_FOLDER = "0.13.27";
export const API_VER_REQURL = "0.13.26"; // desktop ver 0.13.26, iOS ver 1.1.1
export const API_VER_REQURL_ANDROID = "0.14.6"; // Android ver 1.2.1
export const API_VER_ENSURE_REQURL_OK = "1.0.0"; // always bypass CORS here
export const VALID_REQURL =
(!Platform.isAndroidApp && requireApiVersion(API_VER_REQURL)) ||
(Platform.isAndroidApp && requireApiVersion(API_VER_REQURL_ANDROID));
export const DEFAULT_DEBUG_FOLDER = "_debug_remotely_save/";
export const DEFAULT_SYNC_PLANS_HISTORY_FILE_PREFIX =
"sync_plans_hist_exported_on_";

14
src/baseTypesObs.ts Normal file
View File

@ -0,0 +1,14 @@
/**
* Every utils requiring Obsidian is placed here.
*/
import { Platform, requireApiVersion } from "obsidian";
// Minimum Obsidian API versions that gate optional behaviors at runtime.
export const API_VER_STAT_FOLDER = "0.13.27"; // NOTE(review): presumably the min version where folder stat is reliable — confirm at call sites
export const API_VER_REQURL = "0.13.26"; // desktop ver 0.13.26, iOS ver 1.1.1
export const API_VER_REQURL_ANDROID = "0.14.6"; // Android ver 1.2.1
export const API_VER_ENSURE_REQURL_OK = "1.0.0"; // always bypass CORS here
// True when the running platform's API is new enough for `requestUrl`-style
// requests; Android needs a later app version than desktop/iOS.
export const VALID_REQURL =
  (!Platform.isAndroidApp && requireApiVersion(API_VER_REQURL)) ||
  (Platform.isAndroidApp && requireApiVersion(API_VER_REQURL_ANDROID));

View File

@ -1,4 +1,4 @@
import { base64, base64url } from "rfc4648";
import { base64url } from "rfc4648";
import { reverseString } from "./misc";
import type { RemotelySavePluginSettings } from "./baseTypes";

60
src/copyLogic.ts Normal file
View File

@ -0,0 +1,60 @@
import type { FakeFs } from "./fsAll";
/**
 * Mirror a folder entry from `left` onto `right`.
 *
 * Folder keys must end with "/" by this codebase's convention; anything else
 * is a programming error and throws. The source's client mtime is carried
 * over to the created folder. Returns `{ entity, content: undefined }` so the
 * result shape matches `copyFile`'s.
 */
export async function copyFolder(key: string, left: FakeFs, right: FakeFs) {
  const isFolderKey = key.endsWith("/");
  if (!isFolderKey) {
    throw Error(`should not call ${key} in copyFolder`);
  }
  const srcStat = await left.stat(key);
  const created = await right.mkdir(key, srcStat.mtimeCli);
  return {
    entity: created,
    content: undefined,
  };
}
/**
 * Copy a single file from `left` to `right`.
 *
 * File keys must NOT end with "/"; folder keys throw. Sanity checks:
 * - if the source stat reports no size (or zero — some platforms report
 *   neither correctly), trust the byte length actually read instead;
 * - otherwise a mismatch between the reported size and the bytes read throws;
 * - a missing client mtime throws, since the copy must preserve it.
 * Returns the written entity together with the file content so callers can
 * reuse the bytes without a second read.
 */
export async function copyFile(key: string, left: FakeFs, right: FakeFs) {
  if (key.endsWith("/")) {
    throw Error(`should not call ${key} in copyFile`);
  }

  const srcStat = await left.stat(key);
  const content = await left.readFile(key);

  const reportedSize = srcStat.size;
  if (reportedSize === undefined || reportedSize === 0) {
    // some platforms (observed on android) don't report a usable size;
    // fall back to what was actually read
    srcStat.size = content.byteLength;
  } else if (reportedSize !== content.byteLength) {
    throw Error(`error copying ${left.kind}=>${right.kind}: size not matched`);
  }

  if (srcStat.mtimeCli === undefined) {
    throw Error(`error copying ${left.kind}=>${right.kind}, no mtimeCli`);
  }

  const written = await right.writeFile(
    key,
    content,
    srcStat.mtimeCli,
    srcStat.mtimeCli /* TODO */
  );
  return {
    entity: written,
    content: content,
  };
}
/**
 * Copy `key` from `left` to `right`, dispatching on the key convention:
 * a trailing "/" means folder (handled by copyFolder), anything else is a
 * file (handled by copyFile).
 */
export async function copyFileOrFolder(
  key: string,
  left: FakeFs,
  right: FakeFs
) {
  if (!key.endsWith("/")) {
    return await copyFile(key, left, right);
  }
  return await copyFolder(key, left, right);
}

View File

@ -1,22 +1,40 @@
import { TAbstractFile, TFolder, TFile, Vault } from "obsidian";
import type { Vault } from "obsidian";
import {
readAllProfilerResultsByVault,
readAllSyncPlanRecordTextsByVault,
} from "./localdb";
import type { InternalDBs } from "./localdb";
import { mkdirpInVault, unixTimeToStr } from "./misc";
import {
DEFAULT_DEBUG_FOLDER,
DEFAULT_PROFILER_RESULT_FILE_PREFIX,
DEFAULT_SYNC_PLANS_HISTORY_FILE_PREFIX,
} from "./baseTypes";
import {
readAllProfilerResultsByVault,
readAllSyncPlanRecordTextsByVault,
} from "./localdb";
import type { InternalDBs } from "./localdb";
import { mkdirpInVault } from "./misc";
import type { SyncPlanType } from "./sync";
/**
 * Optionally shrink an exported sync-plan JSON string.
 *
 * With `onlyChange` false the raw string is returned untouched. Otherwise the
 * plan is re-serialized (pretty-printed, 2-space indent) keeping only the
 * "/$@meta" header entry and entries whose `change` flag is true or absent —
 * i.e. everything that actually changed (or predates the flag).
 */
const getSubsetOfSyncPlan = (x: string, onlyChange: boolean) => {
  if (!onlyChange) {
    return x;
  }
  const plan: SyncPlanType = JSON.parse(x);
  const keep = ([entryKey, entryVal]: [string, { change?: boolean }]) =>
    entryKey === "/$@meta" ||
    entryVal.change === undefined ||
    entryVal.change === true;
  const filtered: SyncPlanType = Object.fromEntries(
    Object.entries(plan).filter(keep)
  );
  return JSON.stringify(filtered, null, 2);
};
export const exportVaultSyncPlansToFiles = async (
db: InternalDBs,
vault: Vault,
vaultRandomID: string,
howMany: number
howMany: number,
onlyChange: boolean
) => {
console.info("exporting sync plans");
await mkdirpInVault(DEFAULT_DEBUG_FOLDER, vault);
@ -28,12 +46,18 @@ export const exportVaultSyncPlansToFiles = async (
if (howMany <= 0) {
md =
"Sync plans found:\n\n" +
records.map((x) => "```json\n" + x + "\n```\n").join("\n");
records
.map(
(x) => "```json\n" + getSubsetOfSyncPlan(x, onlyChange) + "\n```\n"
)
.join("\n");
} else {
md =
"Sync plans found:\n\n" +
records
.map((x) => "```json\n" + x + "\n```\n")
.map(
(x) => "```json\n" + getSubsetOfSyncPlan(x, onlyChange) + "\n```\n"
)
.slice(0, howMany)
.join("\n");
}

View File

@ -39,7 +39,7 @@ export const encryptArrayBuffer = async (
arrBuf: ArrayBuffer,
password: string,
rounds: number = DEFAULT_ITER,
saltHex: string = ""
saltHex = ""
) => {
let salt: Uint8Array;
if (saltHex !== "") {
@ -109,7 +109,7 @@ export const encryptStringToBase32 = async (
text: string,
password: string,
rounds: number = DEFAULT_ITER,
saltHex: string = ""
saltHex = ""
) => {
const enc = await encryptArrayBuffer(
bufferToArrayBuffer(new TextEncoder().encode(text)),
@ -138,7 +138,7 @@ export const encryptStringToBase64url = async (
text: string,
password: string,
rounds: number = DEFAULT_ITER,
saltHex: string = ""
saltHex = ""
) => {
const enc = await encryptArrayBuffer(
bufferToArrayBuffer(new TextEncoder().encode(text)),

View File

@ -1,5 +1,5 @@
import { nanoid } from "nanoid";
import { Cipher as CipherRCloneCryptPack } from "@fyears/rclone-crypt";
import { nanoid } from "nanoid";
const ctx: WorkerGlobalScope = self as any;

View File

@ -1,8 +1,9 @@
import { Entity } from "./baseTypes";
import type { Entity } from "./baseTypes";
export abstract class FakeFs {
abstract kind: string;
abstract walk(): Promise<Entity[]>;
abstract walkPartial(): Promise<Entity[]>;
abstract stat(key: string): Promise<Entity>;
abstract mkdir(key: string, mtime?: number, ctime?: number): Promise<Entity>;
abstract writeFile(
@ -12,8 +13,10 @@ export abstract class FakeFs {
ctime: number
): Promise<Entity>;
abstract readFile(key: string): Promise<ArrayBuffer>;
abstract rename(key1: string, key2: string): Promise<void>;
abstract rm(key: string): Promise<void>;
abstract checkConnect(callbackFunc?: any): Promise<boolean>;
abstract getUserDisplayName(): Promise<string>;
abstract revokeAuth(): Promise<any>;
abstract allowEmptyFile(): boolean;
}

View File

@ -1,13 +1,13 @@
import { FakeFs } from "./fsAll";
import { Dropbox, DropboxAuth } from "dropbox";
import type { files, DropboxResponseError, DropboxResponse } from "dropbox";
import {
DropboxConfig,
COMMAND_CALLBACK_DROPBOX,
OAUTH2_FORCE_EXPIRE_MILLISECONDS,
Entity,
} from "./baseTypes";
import type { DropboxResponse, DropboxResponseError, files } from "dropbox";
import random from "lodash/random";
import {
COMMAND_CALLBACK_DROPBOX,
type DropboxConfig,
type Entity,
OAUTH2_FORCE_EXPIRE_MILLISECONDS,
} from "./baseTypes";
import { FakeFs } from "./fsAll";
import {
bufferToArrayBuffer,
delay,
@ -134,7 +134,7 @@ export const fixEntityListCasesInplace = (entities: { key?: string }[]) => {
caseMapping[newKey.toLocaleLowerCase()] = newKey;
e.key = newKey;
// console.log(JSON.stringify(caseMapping,null,2));
continue;
// continue;
} else {
throw Error(`${parentFolder} doesn't have cases record??`);
}
@ -145,7 +145,7 @@ export const fixEntityListCasesInplace = (entities: { key?: string }[]) => {
.slice(-1)
.join("/")}`;
e.key = newKey;
continue;
// continue;
} else {
throw Error(`${parentFolder} doesn't have cases record??`);
}
@ -167,7 +167,7 @@ interface ErrSubType {
async function retryReq<T>(
reqFunc: () => Promise<DropboxResponse<T>>,
extraHint: string = ""
extraHint = ""
): Promise<DropboxResponse<T> | undefined> {
const waitSeconds = [1, 2, 4, 8]; // hard code exponential backoff
for (let idx = 0; idx < waitSeconds.length; ++idx) {
@ -205,7 +205,7 @@ async function retryReq<T>(
const headers = headersToRecord(err.headers);
const svrSec =
err.error.error.retry_after ||
parseInt(headers["retry-after"] || "1") ||
Number.parseInt(headers["retry-after"] || "1") ||
1;
const fallbackSec = waitSeconds[idx];
const secMin = Math.max(svrSec, fallbackSec);
@ -233,7 +233,7 @@ async function retryReq<T>(
export const getAuthUrlAndVerifier = async (
appKey: string,
needManualPatse: boolean = false
needManualPatse = false
) => {
const auth = new DropboxAuth({
clientId: appKey,
@ -328,9 +328,9 @@ export const setConfigBySuccessfullAuthInplace = async (
console.info("start updating local info of Dropbox token");
config.accessToken = authRes.access_token;
config.accessTokenExpiresInSeconds = parseInt(authRes.expires_in);
config.accessTokenExpiresInSeconds = Number.parseInt(authRes.expires_in);
config.accessTokenExpiresAtTime =
Date.now() + parseInt(authRes.expires_in) * 1000 - 10 * 1000;
Date.now() + Number.parseInt(authRes.expires_in) * 1000 - 10 * 1000;
// manually set it expired after 80 days;
config.credentialsShouldBeDeletedAtTime =
@ -452,13 +452,21 @@ export class FakeFsDropbox extends FakeFs {
}
async walk(): Promise<Entity[]> {
return await this._walk(false);
}
async walkPartial(): Promise<Entity[]> {
return await this._walk(true);
}
async _walk(partial: boolean): Promise<Entity[]> {
await this._init();
let res = await this.dropbox.filesListFolder({
path: `/${this.remoteBaseDir}`,
recursive: true,
recursive: !partial,
include_deleted: false,
limit: 1000,
limit: partial ? 10 : 1000,
});
if (res.status !== 200) {
throw Error(JSON.stringify(res));
@ -471,20 +479,22 @@ export class FakeFsDropbox extends FakeFs {
.filter((x) => x.path_display !== `/${this.remoteBaseDir}`)
.map((x) => fromDropboxItemToEntity(x, this.remoteBaseDir));
while (res.result.has_more) {
res = await this.dropbox.filesListFolderContinue({
cursor: res.result.cursor,
});
if (res.status !== 200) {
throw Error(JSON.stringify(res));
}
if (!partial) {
while (res.result.has_more) {
res = await this.dropbox.filesListFolderContinue({
cursor: res.result.cursor,
});
if (res.status !== 200) {
throw Error(JSON.stringify(res));
}
const contents2 = res.result.entries;
const unifiedContents2 = contents2
.filter((x) => x[".tag"] !== "deleted")
.filter((x) => x.path_display !== `/${this.remoteBaseDir}`)
.map((x) => fromDropboxItemToEntity(x, this.remoteBaseDir));
unifiedContents.push(...unifiedContents2);
const contents2 = res.result.entries;
const unifiedContents2 = contents2
.filter((x) => x[".tag"] !== "deleted")
.filter((x) => x.path_display !== `/${this.remoteBaseDir}`)
.map((x) => fromDropboxItemToEntity(x, this.remoteBaseDir));
unifiedContents.push(...unifiedContents2);
}
}
fixEntityListCasesInplace(unifiedContents);
@ -685,6 +695,25 @@ export class FakeFsDropbox extends FakeFs {
}
}
async rename(key1: string, key2: string): Promise<void> {
const remoteFileName1 = getDropboxPath(key1, this.remoteBaseDir);
const remoteFileName2 = getDropboxPath(key2, this.remoteBaseDir);
await this._init();
try {
await retryReq(
() =>
this.dropbox.filesMoveV2({
from_path: remoteFileName1,
to_path: remoteFileName2,
}),
`${key1}=>${key2}` // just a hint here
);
} catch (err) {
console.error("some error while moving");
console.error(err);
}
}
async rm(key: string): Promise<void> {
if (key === "/") {
return;
@ -736,4 +765,8 @@ export class FakeFsDropbox extends FakeFs {
return false;
}
}
allowEmptyFile(): boolean {
return true;
}
}

View File

@ -1,10 +1,10 @@
import { CipherMethodType, Entity } from "./baseTypes";
import type { CipherMethodType, Entity } from "./baseTypes";
import * as openssl from "./encryptOpenSSL";
import * as rclone from "./encryptRClone";
import { isVaildText } from "./misc";
import { FakeFs } from "./fsAll";
import cloneDeep from "lodash/cloneDeep";
import { FakeFs } from "./fsAll";
/**
* quick guess, no actual decryption here
@ -78,8 +78,6 @@ export class FakeFsEncrypt extends FakeFs {
cacheMapOrigToEnc: Record<string, string>;
hasCacheMap: boolean;
kind: string;
innerWalkResultCache?: Entity[];
innerWalkResultCacheTime?: number;
constructor(innerFs: FakeFs, password: string, method: CipherMethodType) {
super();
@ -89,7 +87,9 @@ export class FakeFsEncrypt extends FakeFs {
this.cacheMapOrigToEnc = {};
this.hasCacheMap = false;
this.kind = `encrypt(${this.innerFs.kind},${method})`;
this.kind = `encrypt(${this.innerFs.kind},${
this.password !== "" ? method : "no password"
})`;
if (method === "rclone-base64") {
this.cipherRClone = new rclone.CipherRclone(password, 5);
@ -110,26 +110,8 @@ export class FakeFsEncrypt extends FakeFs {
throw Error(`no idea about isFolderAware for method=${this.method}`);
}
/**
* we want a little caching here.
*/
async _getInnerWalkResult(): Promise<Entity[]> {
let innerWalkResult: Entity[] | undefined = undefined;
if (
this.innerWalkResultCacheTime !== undefined &&
this.innerWalkResultCacheTime >= Date.now() - 1000
) {
innerWalkResult = this.innerWalkResultCache!;
} else {
innerWalkResult = await this.innerFs.walk();
this.innerWalkResultCache = innerWalkResult;
this.innerWalkResultCacheTime = Date.now();
}
return innerWalkResult;
}
async isPasswordOk(): Promise<PasswordCheckType> {
const innerWalkResult = await this._getInnerWalkResult();
const innerWalkResult = await this.walkPartial();
if (innerWalkResult === undefined || innerWalkResult.length === 0) {
// remote empty
@ -186,8 +168,16 @@ export class FakeFsEncrypt extends FakeFs {
}
async walk(): Promise<Entity[]> {
const innerWalkResult = await this._getInnerWalkResult();
const innerWalkResult = await this.innerFs.walk();
return await this._dealWithWalk(innerWalkResult);
}
async walkPartial(): Promise<Entity[]> {
const innerWalkResult = await this.innerFs.walkPartial();
return await this._dealWithWalk(innerWalkResult);
}
async _dealWithWalk(innerWalkResult: Entity[]): Promise<Entity[]> {
const res: Entity[] = [];
if (this.isPasswordEmpty()) {
@ -273,9 +263,13 @@ export class FakeFsEncrypt extends FakeFs {
return copyEntityAndCopyKeyEncSizeEnc(innerEntity);
} else {
const now = Date.now();
let content = new ArrayBuffer(0);
if (!this.innerFs.allowEmptyFile()) {
content = new ArrayBuffer(1);
}
const innerEntity = await this.innerFs.writeFile(
keyEnc,
new ArrayBuffer(0),
content,
mtime ?? now,
ctime ?? now
);
@ -362,6 +356,31 @@ export class FakeFsEncrypt extends FakeFs {
}
}
async rename(key1: string, key2: string): Promise<void> {
if (!this.hasCacheMap) {
throw new Error("You have to build the cacheMap firstly for readFile");
}
let key1Enc = this.cacheMapOrigToEnc[key1];
if (key1Enc === undefined) {
if (this.isPasswordEmpty()) {
key1Enc = key1;
} else {
key1Enc = await this._encryptName(key1);
}
this.cacheMapOrigToEnc[key1] = key1Enc;
}
let key2Enc = this.cacheMapOrigToEnc[key2];
if (key2Enc === undefined) {
if (this.isPasswordEmpty()) {
key2Enc = key2;
} else {
key2Enc = await this._encryptName(key2);
}
this.cacheMapOrigToEnc[key2] = key2Enc;
}
return await this.innerFs.rename(key1Enc, key2Enc);
}
async rm(key: string): Promise<void> {
if (!this.hasCacheMap) {
throw new Error("You have to build the cacheMap firstly for rm");
@ -554,4 +573,8 @@ export class FakeFsEncrypt extends FakeFs {
async revokeAuth(): Promise<any> {
return await this.innerFs.revokeAuth();
}
allowEmptyFile(): boolean {
return true;
}
}

View File

@ -1,9 +1,11 @@
import { RemotelySavePluginSettings } from "./baseTypes";
import { FakeFs } from "./fsAll";
import { FakeFsGoogleDrive } from "../pro/src/fsGoogleDrive";
import type { RemotelySavePluginSettings } from "./baseTypes";
import type { FakeFs } from "./fsAll";
import { FakeFsDropbox } from "./fsDropbox";
import { FakeFsOnedrive } from "./fsOnedrive";
import { FakeFsS3 } from "./fsS3";
import { FakeFsWebdav } from "./fsWebdav";
import { FakeFsWebdis } from "./fsWebdis";
/**
* To avoid circular dependency, we need a new file here.
@ -16,30 +18,37 @@ export function getClient(
switch (settings.serviceType) {
case "s3":
return new FakeFsS3(settings.s3);
break;
case "webdav":
return new FakeFsWebdav(
settings.webdav,
vaultName,
saveUpdatedConfigFunc
);
break;
case "dropbox":
return new FakeFsDropbox(
settings.dropbox,
vaultName,
saveUpdatedConfigFunc
);
break;
case "onedrive":
return new FakeFsOnedrive(
settings.onedrive,
vaultName,
saveUpdatedConfigFunc
);
break;
case "webdis":
return new FakeFsWebdis(
settings.webdis,
vaultName,
saveUpdatedConfigFunc
);
case "googledrive":
return new FakeFsGoogleDrive(
settings.googledrive,
vaultName,
saveUpdatedConfigFunc
);
default:
throw Error(`cannot init client for serviceType=${settings.serviceType}`);
break;
}
}

View File

@ -1,17 +1,17 @@
import { DEFAULT_DEBUG_FOLDER, Entity } from "./baseTypes";
import { DEFAULT_DEBUG_FOLDER, type Entity } from "./baseTypes";
import { FakeFs } from "./fsAll";
import { TFile, TFolder, type Vault } from "obsidian";
import { mkdirpInVault, statFix, unixTimeToStr } from "./misc";
import { listFilesInObsFolder } from "./obsFolderLister";
import { Profiler } from "./profiler";
import { getFolderLevels, mkdirpInVault, statFix } from "./misc";
import type { Profiler } from "./profiler";
export class FakeFsLocal extends FakeFs {
vault: Vault;
syncConfigDir: boolean;
configDir: string;
pluginID: string;
profiler: Profiler;
profiler: Profiler | undefined;
deleteToWhere: "obsidian" | "system";
kind: "local";
constructor(
@ -19,7 +19,7 @@ export class FakeFsLocal extends FakeFs {
syncConfigDir: boolean,
configDir: string,
pluginID: string,
profiler: Profiler,
profiler: Profiler | undefined,
deleteToWhere: "obsidian" | "system"
) {
super();
@ -34,12 +34,12 @@ export class FakeFsLocal extends FakeFs {
}
async walk(): Promise<Entity[]> {
this.profiler.addIndent();
this.profiler.insert("enter walk for local");
this.profiler?.addIndent();
this.profiler?.insert("enter walk for local");
const local: Entity[] = [];
const localTAbstractFiles = this.vault.getAllLoadedFiles();
this.profiler.insert("finish getting walk for local");
this.profiler?.insert("finish getting walk for local");
for (const entry of localTAbstractFiles) {
let r: Entity | undefined = undefined;
let key = entry.path;
@ -83,16 +83,16 @@ export class FakeFsLocal extends FakeFs {
if (r.keyRaw.startsWith(DEFAULT_DEBUG_FOLDER)) {
// skip listing the debug folder,
// which should always not involved in sync
continue;
// continue;
} else {
local.push(r);
}
}
this.profiler.insert("finish transforming walk for local");
this.profiler?.insert("finish transforming walk for local");
if (this.syncConfigDir) {
this.profiler.insert("into syncConfigDir");
this.profiler?.insert("into syncConfigDir");
const syncFiles = await listFilesInObsFolder(
this.configDir,
this.vault,
@ -101,14 +101,18 @@ export class FakeFsLocal extends FakeFs {
for (const f of syncFiles) {
local.push(f);
}
this.profiler.insert("finish syncConfigDir");
this.profiler?.insert("finish syncConfigDir");
}
this.profiler.insert("finish walk for local");
this.profiler.removeIndent();
this.profiler?.insert("finish walk for local");
this.profiler?.removeIndent();
return local;
}
async walkPartial(): Promise<Entity[]> {
return await this.walk();
}
async stat(key: string): Promise<Entity> {
const statRes = await statFix(this.vault, key);
if (statRes === undefined || statRes === null) {
@ -120,6 +124,8 @@ export class FakeFsLocal extends FakeFs {
keyRaw: isFolder ? `${key}/` : key,
mtimeCli: statRes.mtime,
mtimeSvr: statRes.mtime,
mtimeCliFmt: unixTimeToStr(statRes.mtime),
mtimeSvrFmt: unixTimeToStr(statRes.mtime),
size: statRes.size, // local always unencrypted
sizeRaw: statRes.size,
};
@ -139,6 +145,7 @@ export class FakeFsLocal extends FakeFs {
): Promise<Entity> {
await this.vault.adapter.writeBinary(key, content, {
mtime: mtime,
ctime: ctime,
});
return await this.stat(key);
}
@ -147,6 +154,10 @@ export class FakeFsLocal extends FakeFs {
return await this.vault.adapter.readBinary(key);
}
async rename(key1: string, key2: string): Promise<void> {
return await this.vault.adapter.rename(key1, key2);
}
async rm(key: string): Promise<void> {
if (this.deleteToWhere === "obsidian") {
await this.vault.adapter.trashLocal(key);
@ -168,4 +179,8 @@ export class FakeFsLocal extends FakeFs {
async revokeAuth(): Promise<any> {
throw new Error("Method not implemented.");
}
allowEmptyFile(): boolean {
return true;
}
}

View File

@ -1,4 +1,4 @@
import { Entity } from "./baseTypes";
import type { Entity } from "./baseTypes";
import { FakeFs } from "./fsAll";
export class FakeFsMock extends FakeFs {
@ -13,6 +13,10 @@ export class FakeFsMock extends FakeFs {
throw new Error("Method not implemented.");
}
async walkPartial(): Promise<Entity[]> {
return await this.walk();
}
async stat(key: string): Promise<Entity> {
throw new Error("Method not implemented.");
}
@ -34,6 +38,10 @@ export class FakeFsMock extends FakeFs {
throw new Error("Method not implemented.");
}
async rename(key1: string, key2: string): Promise<void> {
throw new Error("Method not implemented.");
}
async rm(key: string): Promise<void> {
throw new Error("Method not implemented.");
}
@ -49,4 +57,8 @@ export class FakeFsMock extends FakeFs {
async revokeAuth(): Promise<any> {
throw new Error("Method not implemented.");
}
allowEmptyFile(): boolean {
throw new Error("Method not implemented.");
}
}

View File

@ -1,21 +1,21 @@
import { CryptoProvider, PublicClientApplication } from "@azure/msal-node";
import type { AuthenticationProvider } from "@microsoft/microsoft-graph-client";
import type {
DriveItem,
FileSystemInfo,
UploadSession,
User,
} from "@microsoft/microsoft-graph-types";
import { CryptoProvider, PublicClientApplication } from "@azure/msal-node";
import { AuthenticationProvider } from "@microsoft/microsoft-graph-client";
import cloneDeep from "lodash/cloneDeep";
import { request, requestUrl } from "obsidian";
import {
COMMAND_CALLBACK_ONEDRIVE,
DEFAULT_CONTENT_TYPE,
Entity,
type Entity,
OAUTH2_FORCE_EXPIRE_MILLISECONDS,
OnedriveConfig,
VALID_REQURL,
type OnedriveConfig,
} from "./baseTypes";
import { VALID_REQURL } from "./baseTypesObs";
import { FakeFs } from "./fsAll";
import { bufferToArrayBuffer } from "./misc";
@ -32,6 +32,7 @@ export const DEFAULT_ONEDRIVE_CONFIG: OnedriveConfig = {
deltaLink: "",
username: "",
credentialsShouldBeDeletedAtTime: 0,
emptyFile: "skip",
};
////////////////////////////////////////////////////////////////////////////////
@ -344,6 +345,7 @@ const fromDriveItemToEntity = (x: DriveItem, remoteBaseDir: string): Entity => {
mtimeCli: mtimeCli,
size: isFolder ? 0 : x.size!,
sizeRaw: isFolder ? 0 : x.size!,
synthesizedFile: false,
// hash: ?? // TODO
};
};
@ -567,6 +569,7 @@ export class FakeFsOnedrive extends FakeFs {
// TODO:
// 20220401: On Android, requestUrl has issue that text becomes base64.
// Use fetch everywhere instead!
// biome-ignore lint/correctness/noConstantCondition: hard code
if (false /*VALID_REQURL*/) {
const res = await requestUrl({
url: theUrl,
@ -617,6 +620,7 @@ export class FakeFsOnedrive extends FakeFs {
// TODO:
// 20220401: On Android, requestUrl has issue that text becomes base64.
// Use fetch everywhere instead!
// biome-ignore lint/correctness/noConstantCondition: hard code
if (false /*VALID_REQURL*/) {
const res = await requestUrl({
url: theUrl,
@ -658,7 +662,7 @@ export class FakeFsOnedrive extends FakeFs {
let res = await this._getJson(
`/drive/special/approot:/${this.remoteBaseDir}:/delta`
);
let driveItems = res.value as DriveItem[];
const driveItems = res.value as DriveItem[];
// console.debug(driveItems);
while (NEXT_LINK_KEY in res) {
@ -680,6 +684,31 @@ export class FakeFsOnedrive extends FakeFs {
return unifiedContents;
}
async walkPartial(): Promise<Entity[]> {
await this._init();
const DELTA_LINK_KEY = "@odata.deltaLink";
const res = await this._getJson(
`/drive/special/approot:/${this.remoteBaseDir}:/delta`
);
const driveItems = res.value as DriveItem[];
// console.debug(driveItems);
// lastly we should have delta link?
if (DELTA_LINK_KEY in res) {
this.onedriveConfig.deltaLink = res[DELTA_LINK_KEY];
await this.saveUpdatedConfigFunc();
}
// unify everything to Entity
const unifiedContents = driveItems
.map((x) => fromDriveItemToEntity(x, this.remoteBaseDir))
.filter((x) => x.key !== "/");
return unifiedContents;
}
async stat(key: string): Promise<Entity> {
await this._init();
return await this._statFromRoot(getOnedrivePath(key, this.remoteBaseDir));
@ -719,7 +748,7 @@ export class FakeFsOnedrive extends FakeFs {
} else {
// https://stackoverflow.com/questions/56479865/creating-nested-folders-in-one-go-onedrive-api
// use PATCH to create folder recursively!!!
let playload: any = {
const playload: any = {
folder: {},
"@microsoft.graph.conflictBehavior": "replace",
};
@ -758,7 +787,8 @@ export class FakeFsOnedrive extends FakeFs {
content,
mtime,
ctime,
key
key,
this.onedriveConfig.emptyFile
);
}
@ -767,12 +797,26 @@ export class FakeFsOnedrive extends FakeFs {
content: ArrayBuffer,
mtime: number,
ctime: number,
origKey: string
origKey: string,
emptyFile: "skip" | "error"
): Promise<Entity> {
if (content.byteLength === 0) {
throw Error(
`${origKey}: Empty file is not allowed in OneDrive, and please write something in it.`
);
if (emptyFile === "error") {
throw Error(
`${origKey}: Empty file is not allowed in OneDrive, and please write something in it.`
);
} else {
return {
key: origKey,
keyRaw: origKey,
mtimeSvr: mtime,
mtimeCli: mtime,
size: 0,
sizeRaw: 0,
synthesizedFile: true,
// hash: ?? // TODO
};
}
}
const ctimeStr = new Date(ctime).toISOString();
@ -884,6 +928,18 @@ export class FakeFsOnedrive extends FakeFs {
}
}
/**
 * Rename a remote item via Microsoft Graph.
 *
 * Graph's PATCH on a driveItem expects `name` to be the bare item name
 * (no slashes), not a full path, so only the last path segment of key2
 * is sent. NOTE(review): this therefore only renames within the same
 * parent folder; moving across folders would need `parentReference`.
 */
async rename(key1: string, key2: string): Promise<void> {
  // never rename the vault root
  if (key1 === "" || key1 === "/" || key2 === "" || key2 === "/") {
    return;
  }
  const remoteFileName1 = getOnedrivePath(key1, this.remoteBaseDir);
  // strip any trailing slash (folders) before taking the leaf name
  const normalized = key2.endsWith("/") ? key2.slice(0, -1) : key2;
  const segments = normalized.split("/");
  const newLeafName = segments[segments.length - 1];
  await this._init();
  await this._patchJson(remoteFileName1, {
    name: newLeafName,
  });
}
async rm(key: string): Promise<void> {
if (key === "" || key === "/") {
return;
@ -926,4 +982,8 @@ export class FakeFsOnedrive extends FakeFs {
async getRevokeAddr() {
return "https://account.live.com/consent/Manage";
}
allowEmptyFile(): boolean {
return false;
}
}

View File

@ -1,35 +1,37 @@
import type { _Object, PutObjectCommandInput } from "@aws-sdk/client-s3";
import { Buffer } from "buffer";
import * as path from "path";
import { Readable } from "stream";
import type { PutObjectCommandInput, _Object } from "@aws-sdk/client-s3";
import {
DeleteObjectCommand,
GetObjectCommand,
HeadObjectCommand,
HeadObjectCommandOutput,
type HeadObjectCommandOutput,
ListObjectsV2Command,
ListObjectsV2CommandInput,
type ListObjectsV2CommandInput,
PutObjectCommand,
S3Client,
} from "@aws-sdk/client-s3";
import { Upload } from "@aws-sdk/lib-storage";
import { HttpRequest, HttpResponse } from "@smithy/protocol-http";
import type { HttpHandlerOptions } from "@aws-sdk/types";
import {
FetchHttpHandler,
FetchHttpHandlerOptions,
type FetchHttpHandlerOptions,
} from "@smithy/fetch-http-handler";
// @ts-ignore
import { requestTimeout } from "@smithy/fetch-http-handler/dist-es/request-timeout";
import { type HttpRequest, HttpResponse } from "@smithy/protocol-http";
import { buildQueryString } from "@smithy/querystring-builder";
import { HttpHandlerOptions } from "@aws-sdk/types";
import { Buffer } from "buffer";
import * as mime from "mime-types";
import { Platform, requestUrl, RequestUrlParam } from "obsidian";
import { Readable } from "stream";
import * as path from "path";
// biome-ignore lint/suspicious/noShadowRestrictedNames: <explanation>
import AggregateError from "aggregate-error";
import { DEFAULT_CONTENT_TYPE, S3Config, VALID_REQURL } from "./baseTypes";
import { bufferToArrayBuffer, getFolderLevels } from "./misc";
import * as mime from "mime-types";
import { Platform, type RequestUrlParam, requestUrl } from "obsidian";
import PQueue from "p-queue";
import { DEFAULT_CONTENT_TYPE, type S3Config } from "./baseTypes";
import { VALID_REQURL } from "./baseTypesObs";
import { bufferToArrayBuffer, getFolderLevels } from "./misc";
import { Entity } from "./baseTypes";
import type { Entity } from "./baseTypes";
import { FakeFs } from "./fsAll";
////////////////////////////////////////////////////////////////////////////////
@ -339,15 +341,16 @@ const fromS3ObjectToEntity = (
const fromS3HeadObjectToEntity = (
fileOrFolderPathWithRemotePrefix: string,
x: HeadObjectCommandOutput,
remotePrefix: string
remotePrefix: string,
useAccurateMTime: boolean
) => {
// console.debug(`fromS3HeadObjectToEntity: ${fileOrFolderPathWithRemotePrefix}: ${JSON.stringify(x,null,2)}`);
// S3 officially only supports seconds precision!!!!!
const mtimeSvr = Math.floor(x.LastModified!.valueOf() / 1000.0) * 1000;
let mtimeCli = mtimeSvr;
if (x.Metadata !== undefined) {
if (useAccurateMTime && x.Metadata !== undefined) {
const m2 = Math.floor(
parseFloat(x.Metadata.mtime || x.Metadata.MTime || "0")
Number.parseFloat(x.Metadata.mtime || x.Metadata.MTime || "0")
);
if (m2 !== 0) {
// to be compatible with RClone, we read and store the time in seconds in new version!
@ -396,9 +399,16 @@ export class FakeFsS3 extends FakeFs {
}
async walk(): Promise<Entity[]> {
const res = (await this._walkFromRoot(this.s3Config.remotePrefix)).filter(
(x) => x.key !== "" && x.key !== "/"
);
const res = (
await this._walkFromRoot(this.s3Config.remotePrefix, false)
).filter((x) => x.key !== "" && x.key !== "/");
return res;
}
async walkPartial(): Promise<Entity[]> {
const res = (
await this._walkFromRoot(this.s3Config.remotePrefix, true)
).filter((x) => x.key !== "" && x.key !== "/");
return res;
}
@ -406,19 +416,23 @@ export class FakeFsS3 extends FakeFs {
* the input key contains basedir (prefix),
* but the result doesn't contain it.
*/
async _walkFromRoot(prefixOfRawKeys: string | undefined) {
async _walkFromRoot(prefixOfRawKeys: string | undefined, partial: boolean) {
const confCmd = {
Bucket: this.s3Config.s3BucketName,
} as ListObjectsV2CommandInput;
if (prefixOfRawKeys !== undefined && prefixOfRawKeys !== "") {
confCmd.Prefix = prefixOfRawKeys;
}
if (partial) {
confCmd.MaxKeys = 10; // no need to list more!
}
const contents = [] as _Object[];
const mtimeRecords: Record<string, number> = {};
const ctimeRecords: Record<string, number> = {};
const partsConcurrency = partial ? 1 : this.s3Config.partsConcurrency;
const queueHead = new PQueue({
concurrency: this.s3Config.partsConcurrency,
concurrency: partsConcurrency,
autoStart: true,
});
queueHead.on("error", (error) => {
@ -456,12 +470,12 @@ export class FakeFsS3 extends FakeFs {
// pass
} else {
mtimeRecords[content.Key!] = Math.floor(
parseFloat(
Number.parseFloat(
rspHead.Metadata.mtime || rspHead.Metadata.MTime || "0"
)
);
ctimeRecords[content.Key!] = Math.floor(
parseFloat(
Number.parseFloat(
rspHead.Metadata.ctime || rspHead.Metadata.CTime || "0"
)
);
@ -470,14 +484,20 @@ export class FakeFsS3 extends FakeFs {
}
}
isTruncated = rsp.IsTruncated ?? false;
confCmd.ContinuationToken = rsp.NextContinuationToken;
if (
isTruncated &&
(confCmd.ContinuationToken === undefined ||
confCmd.ContinuationToken === "")
) {
throw Error("isTruncated is true but no continuationToken provided");
if (partial) {
// do not loop over
isTruncated = false;
} else {
// loop over
isTruncated = rsp.IsTruncated ?? false;
confCmd.ContinuationToken = rsp.NextContinuationToken;
if (
isTruncated &&
(confCmd.ContinuationToken === undefined ||
confCmd.ContinuationToken === "")
) {
throw Error("isTruncated is true but no continuationToken provided");
}
}
} while (isTruncated);
@ -560,7 +580,12 @@ export class FakeFsS3 extends FakeFs {
})
);
return fromS3HeadObjectToEntity(key, res, this.s3Config.remotePrefix ?? "");
return fromS3HeadObjectToEntity(
key,
res,
this.s3Config.remotePrefix ?? "",
this.s3Config.useAccurateMTime ?? false
);
}
async mkdir(key: string, mtime?: number, ctime?: number): Promise<Entity> {
@ -721,6 +746,10 @@ export class FakeFsS3 extends FakeFs {
return bodyContents;
}
/**
 * Not supported: S3 has no native rename operation; a copy + delete
 * would be required and is deliberately not implemented here.
 * @throws always
 */
async rename(key1: string, key2: string): Promise<void> {
throw Error(`rename not implemented for s3`);
}
async rm(key: string): Promise<void> {
if (key === "/") {
return;
@ -751,13 +780,6 @@ export class FakeFsS3 extends FakeFs {
async checkConnect(callbackFunc?: any): Promise<boolean> {
try {
// TODO: no universal way now, just check this in connectivity
if (Platform.isIosApp && this.s3Config.s3Endpoint.startsWith("http://")) {
throw Error(
`Your s3 endpoint could only be https, not http, because of the iOS restriction.`
);
}
// const results = await this.s3Client.send(
// new HeadBucketCommand({ Bucket: this.s3Config.s3BucketName })
// );
@ -809,4 +831,8 @@ export class FakeFsS3 extends FakeFs {
async revokeAuth() {
throw new Error("Method not implemented.");
}
allowEmptyFile(): boolean {
return true;
}
}

View File

@ -1,20 +1,23 @@
import { getReasonPhrase } from "http-status-codes/build/cjs/utils-functions";
import { Buffer } from "buffer";
import cloneDeep from "lodash/cloneDeep";
import { Queue } from "@fyears/tsqueue";
import { getReasonPhrase } from "http-status-codes/build/cjs/utils-functions";
import chunk from "lodash/chunk";
import cloneDeep from "lodash/cloneDeep";
import flatten from "lodash/flatten";
import { Platform, requestUrl } from "obsidian";
import { FakeFs } from "./fsAll";
import { bufferToArrayBuffer } from "./misc";
import { Entity, VALID_REQURL, WebdavConfig } from "./baseTypes";
import isString from "lodash/isString";
import { nanoid } from "nanoid";
import { Platform, type RequestUrlParam, requestUrl } from "obsidian";
import type {
FileStat,
WebDAVClient,
RequestOptionsWithState,
WebDAVClient,
// Response,
// ResponseDataDetailed,
} from "webdav";
import type { Entity, WebdavConfig } from "./baseTypes";
import { VALID_REQURL } from "./baseTypesObs";
import { FakeFs } from "./fsAll";
import { bufferToArrayBuffer, delay, splitFileSizeToChunkRanges } from "./misc";
/**
* https://stackoverflow.com/questions/32850898/how-to-check-if-a-string-has-any-non-iso-8859-1-characters-with-javascript
@ -54,20 +57,22 @@ if (VALID_REQURL) {
retractedHeaders["authorization"] = "<retracted>";
}
console.debug(`before request:`);
console.debug(`url: ${options.url}`);
console.debug(`method: ${options.method}`);
console.debug(`headers: ${JSON.stringify(retractedHeaders, null, 2)}`);
console.debug(`reqContentType: ${reqContentType}`);
// console.debug(`before request:`);
// console.debug(`url: ${options.url}`);
// console.debug(`method: ${options.method}`);
// console.debug(`headers: ${JSON.stringify(retractedHeaders, null, 2)}`);
// console.debug(`reqContentType: ${reqContentType}`);
let r = await requestUrl({
const p: RequestUrlParam = {
url: options.url,
method: options.method,
body: options.data as string | ArrayBuffer,
headers: transformedHeaders,
contentType: reqContentType,
throw: false,
});
};
let r = await requestUrl(p);
if (
r.status === 401 &&
@ -81,20 +86,14 @@ if (VALID_REQURL) {
// if a folder doesn't exist without slash, the servers return 401 instead of 404
// here is a dirty hack that works
console.debug(`so we have 401, try appending request url with slash`);
r = await requestUrl({
url: `${options.url}/`,
method: options.method,
body: options.data as string | ArrayBuffer,
headers: transformedHeaders,
contentType: reqContentType,
throw: false,
});
p.url = `${options.url}/`;
r = await requestUrl(p);
}
console.debug(`after request:`);
// console.debug(`after request:`);
const rspHeaders = objKeyToLower({ ...r.headers });
console.debug(`rspHeaders: ${JSON.stringify(rspHeaders, null, 2)}`);
for (let key in rspHeaders) {
// console.debug(`rspHeaders: ${JSON.stringify(rspHeaders, null, 2)}`);
for (const key in rspHeaders) {
if (rspHeaders.hasOwnProperty(key)) {
// avoid the error:
// Failed to read the 'headers' property from 'ResponseInit': String contains non ISO-8859-1 code point.
@ -110,7 +109,7 @@ if (VALID_REQURL) {
// }
// }
if (!onlyAscii(rspHeaders[key])) {
console.debug(`rspHeaders[key] needs encode: ${key}`);
// console.debug(`rspHeaders[key] needs encode: ${key}`);
rspHeaders[key] = encodeURIComponent(rspHeaders[key]);
}
}
@ -118,7 +117,7 @@ if (VALID_REQURL) {
let r2: Response | undefined = undefined;
const statusText = getReasonPhrase(r.status);
console.debug(`statusText: ${statusText}`);
// console.debug(`statusText: ${statusText}`);
if ([101, 103, 204, 205, 304].includes(r.status)) {
// A null body status is a status that is 101, 103, 204, 205, or 304.
// https://fetch.spec.whatwg.org/#statuses
@ -142,6 +141,7 @@ if (VALID_REQURL) {
}
// @ts-ignore
// biome-ignore lint: we want to ts-ignore the next line
import { AuthType, BufferLike, createClient } from "webdav/dist/web/index.js";
export const DEFAULT_WEBDAV_CONFIG = {
@ -201,6 +201,14 @@ const fromWebdavItemToEntity = (x: FileStat, remoteBaseDir: string): Entity => {
};
};
/**
 * Encode a URI, but skip encoding when the input already looks encoded
 * (contains a "%"), to avoid double-encoding a pasted address.
 */
const tryEncodeURI = (x: string) => {
  // a "%" strongly suggests the string was percent-encoded already
  return x.includes("%") ? x : encodeURI(x);
};
export class FakeFsWebdav extends FakeFs {
kind: "webdav";
@ -210,6 +218,10 @@ export class FakeFsWebdav extends FakeFs {
vaultFolderExists: boolean;
saveUpdatedConfigFunc: () => Promise<any>;
supportNativePartial: boolean;
isNextcloud: boolean;
nextcloudUploadServerAddress: string;
constructor(
webdavConfig: WebdavConfig,
vaultName: string,
@ -218,10 +230,14 @@ export class FakeFsWebdav extends FakeFs {
super();
this.kind = "webdav";
this.webdavConfig = cloneDeep(webdavConfig);
this.webdavConfig.address = encodeURI(this.webdavConfig.address);
this.webdavConfig.address = tryEncodeURI(this.webdavConfig.address);
this.remoteBaseDir = this.webdavConfig.remoteBaseDir || vaultName || "";
this.vaultFolderExists = false;
this.saveUpdatedConfigFunc = saveUpdatedConfigFunc;
this.supportNativePartial = false;
this.isNextcloud = false;
this.nextcloudUploadServerAddress = "";
}
async _init() {
@ -230,12 +246,6 @@ export class FakeFsWebdav extends FakeFs {
return;
}
if (Platform.isIosApp && !this.webdavConfig.address.startsWith("https")) {
throw Error(
`Your webdav address could only be https, not http, because of the iOS restriction.`
);
}
const headers = {
"Cache-Control": "no-cache",
};
@ -291,6 +301,81 @@ export class FakeFsWebdav extends FakeFs {
);
}
}
await this._checkPartialSupport();
}
/**
* <server>/remote.php/dav/files/<userid>
* => <server>/remote.php/dav/uploads/<userid>
*/
/**
 * Derive the Nextcloud chunked-upload endpoint from the files endpoint:
 *   <server>/remote.php/dav/files/<userid>
 *   => <server>/remote.php/dav/uploads/<userid>
 * Throws when the configured address does not match that layout.
 */
_getnextcloudUploadServerAddress = () => {
  let addr = this.webdavConfig.address;
  // drop a single trailing slash so the split below is predictable
  if (addr.endsWith("/")) {
    addr = addr.substring(0, addr.length - 1);
  }
  const s = addr.split("/");
  const n = s.length;
  // expect .../dav/files/<userid> with a non-empty user id
  if (n > 3 && s[n - 3] === "dav" && s[n - 2] === "files" && s[n - 1] !== "") {
    s[n - 2] = "uploads";
    return s.join("/");
  }
  throw Error(`cannot construct upload address for ${s}`);
};
/**
 * Probe the server's DAV compliance header to decide which chunked-upload
 * strategy (if any) large files can use.
 *
 * Sets this.isNextcloud and this.nextcloudUploadServerAddress when the
 * server advertises Nextcloud, credentials are configured, and the address
 * matches the .../dav/files/<user> layout; otherwise falls through to
 * checking for Apache mod_dav / SabreDAV native partial-update support
 * (this.supportNativePartial).
 * @returns true when some partial-upload strategy is available
 */
async _checkPartialSupport() {
const compliance = await this.client.getDAVCompliance(
`/${this.remoteBaseDir}/`
);
for (const c of compliance.compliance) {
// nextcloud AND with an account
if (
c.toLocaleLowerCase().includes("nextcloud") &&
this.webdavConfig.username !== "" &&
this.webdavConfig.password !== ""
) {
// the address is parsable
const s = this.webdavConfig.address.split("/");
if (
s.length > 3 &&
s[s.length - 3] === "dav" &&
s[s.length - 2] === "files" &&
s[s.length - 1] !== ""
) {
this.isNextcloud = true;
this.nextcloudUploadServerAddress =
this._getnextcloudUploadServerAddress();
console.debug(
`isNextcloud=${this.isNextcloud}, uploadFolder=${this.nextcloudUploadServerAddress}`
);
return true;
} else {
// Nextcloud detected but the address shape is unexpected:
// bail out entirely (native partial detection below is skipped).
return false;
}
}
}
// taken from https://github.com/perry-mitchell/webdav-client/blob/master/source/operations/partialUpdateFileContents.ts
// which is under MIT license
if (
(compliance.server.includes("Apache") &&
compliance.compliance.includes(
"<http://apache.org/dav/propset/fs/1>"
)) ||
compliance.compliance.includes("sabredav-partialupdate")
) {
this.supportNativePartial = true;
console.debug(`supportNativePartial=true`);
return true;
}
return false;
}
async walk(): Promise<Entity[]> {
@ -327,7 +412,20 @@ export class FakeFsWebdav extends FakeFs {
// glob: "/**" /* avoid dot files by using glob */,
}) as Promise<FileStat[]>;
});
const r2 = flatten(await Promise.all(r));
const r3 = await Promise.all(r);
for (const r4 of r3) {
if (
this.webdavConfig.address.includes("jianguoyun.com") &&
r4.length >= 749
) {
// https://help.jianguoyun.com/?p=2064
// no more than 750 per request
throw Error(
`出错:坚果云 api 有限制,文件列表加载不全。终止同步!`
);
}
}
const r2 = flatten(r3);
subContents.push(...r2);
}
for (let i = 0; i < subContents.length; ++i) {
@ -355,6 +453,19 @@ export class FakeFsWebdav extends FakeFs {
return contents.map((x) => fromWebdavItemToEntity(x, this.remoteBaseDir));
}
async walkPartial(): Promise<Entity[]> {
await this._init();
const contents = (await this.client.getDirectoryContents(
`/${this.remoteBaseDir}`,
{
deep: false, // partial, no need to recursive here
details: false /* no need for verbose details here */,
}
)) as FileStat[];
return contents.map((x) => fromWebdavItemToEntity(x, this.remoteBaseDir));
}
async stat(key: string): Promise<Entity> {
await this._init();
const fullPath = getWebdavPath(key, this.remoteBaseDir);
@ -382,8 +493,10 @@ export class FakeFsWebdav extends FakeFs {
mtime?: number,
ctime?: number
): Promise<Entity> {
// the sync algorithm should do recursive manually already.
// if we set recursive: true here, Digest auth will return some error inside the PROPFIND
await this.client.createDirectory(key, {
recursive: true,
recursive: false,
});
return await this._statFromRoot(key);
}
@ -399,22 +512,251 @@ export class FakeFsWebdav extends FakeFs {
}
await this._init();
const uploadFile = getWebdavPath(key, this.remoteBaseDir);
return await this._writeFileFromRoot(uploadFile, content, mtime, ctime);
return await this._writeFileFromRoot(
uploadFile,
content,
mtime,
ctime,
key
);
}
async _writeFileFromRoot(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
ctime: number,
origKey: string
): Promise<Entity> {
// less than 10 MB
if (content.byteLength <= 10 * 1024 * 1024) {
return await this._writeFileFromRootFull(
key,
content,
mtime,
ctime,
origKey
);
}
// larger than 10 MB
if (!this.isNextcloud && !this.supportNativePartial) {
// give up and upload by whole, and directly return
return await this._writeFileFromRootFull(
key,
content,
mtime,
ctime,
origKey
);
}
// try to upload by chunks
try {
if (this.isNextcloud) {
return await this._writeFileFromRootNextcloud(
key,
content,
mtime,
ctime,
origKey
);
} else if (this.supportNativePartial) {
return await this._writeFileFromRootNativePartial(
key,
content,
mtime,
ctime,
origKey
);
}
throw Error(`Error: partial upload / update method is not implemented??`);
} catch (e) {
console.error(
`we fail to write file partially for nextcloud or apache or sabre/dav, stop!`
);
console.error(e);
throw e;
// this.isNextcloud = false;
// this.supportNativePartial = false;
// return await this._writeFileFromRootFull(
// key,
// content,
// mtime,
// ctime,
// origKey
// );
}
}
async _writeFileFromRootFull(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number,
origKey: string
): Promise<Entity> {
// console.debug(`start _writeFileFromRootFull`);
await this.client.putFileContents(key, content, {
overwrite: true,
onUploadProgress: (progress: any) => {
console.info(`Uploaded ${progress.loaded} bytes of ${progress.total}`);
},
});
return await this._statFromRoot(key);
const k = await this._statFromRoot(key);
// console.debug(`end _writeFileFromRootFull`);
return k;
}
/**
* https://docs.nextcloud.com/server/latest/developer_manual/client_apis/WebDAV/chunking.html
* @param key
* @param content
* @param mtime
* @param ctime
* @returns
*/
async _writeFileFromRootNextcloud(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number,
origKey: string
): Promise<Entity> {
// chunked upload only makes sense for files, never folders
if (key.endsWith("/")) {
throw Error(
`key=${key} should not have tailing slash in _writeFileFromRootNextcloud`
);
}
// final location of the assembled file on the "files" endpoint
const destUrl = `${this.webdavConfig.address}/${encodeURI(key)}`;
console.debug(`destUrl=${destUrl}`);
// the temporary upload folder is named after the file's leaf name (encoded)
const getTmpFolder = (x: string) => {
if (x.endsWith("/")) {
throw Error(`file to upload by chunk should not ends with /`);
}
const y = x.split("/");
const z = encodeURI(`${y[y.length - 1]}`);
return z;
};
const uploadServerAddress = this.nextcloudUploadServerAddress;
console.debug(`uploadServerAddress=${uploadServerAddress}`);
const tmpFolderName = getTmpFolder(key);
console.debug(`tmpFolderName=${tmpFolderName}`);
// a second client pointed at the .../dav/uploads/<user> endpoint
const clientForUpload = createClient(uploadServerAddress, {
username: this.webdavConfig.username,
password: this.webdavConfig.password,
headers: {
"Cache-Control": "no-cache",
},
authType:
this.webdavConfig.authType === "digest"
? AuthType.Digest
: AuthType.Password,
});
// create folder
await clientForUpload.createDirectory(tmpFolderName, {
method: "MKCOL",
headers: {
Destination: destUrl,
},
});
console.debug(`finish creating folder`);
// upload by chunks
const sizePerChunk = 5 * 1024 * 1024; // 5 mb
const chunkRanges = splitFileSizeToChunkRanges(
content.byteLength,
sizePerChunk
);
for (let i = 0; i < chunkRanges.length; ++i) {
const { start, end } = chunkRanges[i];
// chunk files must be zero-padded so they sort/assemble in order
const tmpFileName = `${i + 1}`.padStart(5, "0");
const tmpFileNameWithFolder = `${tmpFolderName}/${tmpFileName}`;
console.debug(
`start to upload chunk ${
i + 1
} to ${tmpFileNameWithFolder} with startInclusive=${start}, endInclusive=${end}`
);
// end is inclusive; slice()'s second argument is exclusive
await clientForUpload.putFileContents(
tmpFileNameWithFolder,
content.slice(start, end + 1),
{
headers: {
Destination: destUrl,
"OC-Total-Length": `${content.byteLength}`,
},
}
);
}
console.debug(`finish upload all chunks`);
// move to assemble
// MOVE on the magic ".file" name tells Nextcloud to concatenate the chunks
const fakeFileToMoveUrl = `${tmpFolderName}/.file`;
console.debug(`fakeFileToMoveUrl=${fakeFileToMoveUrl}`);
await clientForUpload.customRequest(fakeFileToMoveUrl, {
method: "MOVE",
headers: {
Destination: destUrl,
"OC-Total-Length": `${content.byteLength}`,
},
});
console.debug(`finish moving file`);
// TODO: setting X-OC-Mtime
// stat
console.debug(`before stat origKey=${origKey}`);
const k = await this.stat(origKey);
console.debug(`after stat`);
// verify the assembled size; on mismatch disable nextcloud chunking
// for subsequent uploads and report failure
if (k.sizeRaw !== content.byteLength) {
// we failed!
this.isNextcloud = false; // give up next time!
const err = `unable to upload file ${key} by chunks to nextcloud`;
console.error(err);
throw Error(err);
}
console.debug(`after stat, k=${JSON.stringify(k, null, 2)}`);
return k;
}
async _writeFileFromRootNativePartial(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number,
origKey: string
): Promise<Entity> {
// firstly upload a 0-byte data
await this._writeFileFromRootFull(
key,
new ArrayBuffer(0),
mtime,
ctime,
origKey
);
// then "update" by chunks
const sizePerChunk = 5 * 1024 * 1024; // 5 mb
const chunkRanges = splitFileSizeToChunkRanges(
content.byteLength,
sizePerChunk
);
for (let i = 0; i < chunkRanges.length; ++i) {
const { start, end } = chunkRanges[i];
await this.client.partialUpdateFileContents(
key,
start,
end,
content.slice(start, end + 1)
);
}
// lastly return
return await this.stat(origKey);
}
async readFile(key: string): Promise<ArrayBuffer> {
@ -436,6 +778,16 @@ export class FakeFsWebdav extends FakeFs {
throw Error(`unexpected file content result with type ${typeof buff}`);
}
/**
 * Rename/move a remote item via WebDAV MOVE.
 * The vault root ("/") is never renamed.
 */
async rename(key1: string, key2: string): Promise<void> {
  // guard: refuse to touch the root
  if (key1 === "/" || key2 === "/") {
    return;
  }
  const src = getWebdavPath(key1, this.remoteBaseDir);
  const dst = getWebdavPath(key2, this.remoteBaseDir);
  await this._init();
  await this.client.moveFile(src, dst);
}
async rm(key: string): Promise<void> {
if (key === "/") {
return;
@ -491,4 +843,8 @@ export class FakeFsWebdav extends FakeFs {
async revokeAuth() {
throw new Error("Method not implemented.");
}
allowEmptyFile(): boolean {
return true;
}
}

274
src/fsWebdis.ts Normal file
View File

@ -0,0 +1,274 @@
import { isEqual } from "lodash";
import {
DEFAULT_CONTENT_TYPE,
type Entity,
type WebdisConfig,
} from "./baseTypes";
import { FakeFs } from "./fsAll";
// Default (all-empty) settings for the Webdis backend; filled in by the user.
export const DEFAULT_WEBDIS_CONFIG: WebdisConfig = {
address: "", // webdis server address; must start with http(s):// and have no trailing slash
username: "", // optional HTTP basic auth user (set both or neither)
password: "", // optional HTTP basic auth password (set both or neither)
remoteBaseDir: "", // remote key prefix; falls back to the vault name when empty
};
/**
 * Map a vault-relative path to its Webdis/Redis key: the base dir is
 * prepended, the whole path percent-encoded, and the result namespaced
 * under "rs:fs:v1:".
 */
const getWebdisPath = (fileOrFolderPath: string, remoteBaseDir: string) => {
  let joined: string;
  if (fileOrFolderPath === "/" || fileOrFolderPath === "") {
    // the root maps to the bare base dir
    joined = `${remoteBaseDir}`;
  } else if (fileOrFolderPath.startsWith("/")) {
    console.warn(
      `why the path ${fileOrFolderPath} starts with '/'? but we just go on.`
    );
    joined = `${remoteBaseDir}${fileOrFolderPath}`;
  } else {
    joined = `${remoteBaseDir}/${fileOrFolderPath}`;
  }
  // encode so path characters cannot collide with the key namespace
  return `rs:fs:v1:${encodeURIComponent(joined)}`; // we should encode them!!!!
};
/**
 * Invert getWebdisPath: recover the vault-relative path from a full
 * Redis key such as "rs:fs:v1:<base>%2F<path>:meta".
 * Throws when the key does not belong to this base dir's namespace.
 */
export const getOrigPath = (fullKey: string, remoteBaseDir: string) => {
  const decoded = decodeURIComponent(fullKey);
  const prefix = `rs:fs:v1:${remoteBaseDir}/`;
  if (!decoded.startsWith(prefix)) {
    throw Error(`you should not call getOrigEntity on ${fullKey}`);
  }
  let realKey = decoded.slice(prefix.length);
  // drop a single trailing ":meta" or ":content" marker, if present
  for (const suffix of [":meta", ":content"]) {
    if (realKey.endsWith(suffix)) {
      realKey = realKey.slice(0, -suffix.length);
      break;
    }
  }
  // console.debug(`fullKey=${fullKey}, realKey=${realKey}`);
  return realKey;
};
export class FakeFsWebdis extends FakeFs {
kind: "webdis";
webdisConfig: WebdisConfig;
remoteBaseDir: string;
saveUpdatedConfigFunc: () => Promise<any>;
/**
 * @param webdisConfig connection settings for the webdis server
 * @param vaultName fallback remote base dir when none is configured
 * @param saveUpdatedConfigFunc callback that persists settings changes
 */
constructor(
  webdisConfig: WebdisConfig,
  vaultName: string,
  saveUpdatedConfigFunc: () => Promise<any>
) {
  super();
  this.kind = "webdis";
  this.webdisConfig = webdisConfig;
  this.saveUpdatedConfigFunc = saveUpdatedConfigFunc;
  // prefer the explicit setting, then the vault name, then empty
  this.remoteBaseDir = this.webdisConfig.remoteBaseDir || vaultName || "";
}
/**
 * Issue a Redis command against the webdis HTTP endpoint.
 *
 * @param method HTTP verb; a binary body is only allowed with "PUT"
 * @param urlPath the webdis command path, e.g. "SCAN/0/MATCH/..."
 * @param content optional binary payload (PUT only)
 * @returns the raw fetch Response
 * @throws when the address is malformed, when a body is used with a
 *         non-PUT method, or when only one of username/password is set
 */
async _fetchCommand(
  method: "GET" | "POST" | "PUT",
  urlPath: string,
  content?: ArrayBuffer
) {
  const address = this.webdisConfig.address;
  if (!address.startsWith(`https://`) && !address.startsWith(`http://`)) {
    throw Error(
      `your webdis server address should start with https:// or http://`
    );
  }
  if (address.endsWith("/")) {
    throw Error(`your webdis server should not ends with /`);
  }
  if (content !== undefined && method !== "PUT") {
    // fix: the guard checks PUT, so the message must say PUT (was "POST")
    throw Error(`you can only "PUT" ArrayBuffer, not using other methods`);
  }
  const fullUrl = `${address}/${urlPath}`;
  // console.debug(`fullUrl=${fullUrl}`)
  const username = this.webdisConfig.username ?? "";
  const password = this.webdisConfig.password ?? "";
  // credentials must be both set or both empty
  if ((username !== "") !== (password !== "")) {
    throw Error(
      `your username and password should both be empty or not empty!`
    );
  }
  // attach basic auth only when credentials are configured
  const headers: Record<string, string> =
    username !== ""
      ? { Authorization: "Basic " + btoa(username + ":" + password) }
      : {};
  return await fetch(fullUrl, {
    method: method,
    headers: headers,
    body: content,
  });
}
async walk(): Promise<Entity[]> {
let cursor = "0";
const res: Entity[] = [];
do {
const command = `SCAN/${cursor}/MATCH/rs:fs:v1:*:meta/COUNT/1000`;
const rsp = (await (await this._fetchCommand("GET", command)).json())[
"SCAN"
];
// console.debug(rsp);
cursor = rsp[0];
for (const fullKeyWithMeta of rsp[1]) {
const realKey = getOrigPath(fullKeyWithMeta, this.remoteBaseDir);
res.push(await this.stat(realKey));
}
} while (cursor !== "0");
// console.debug(`walk res:`);
// console.debug(res);
return res;
}
async walkPartial(): Promise<Entity[]> {
let cursor = "0";
const res: Entity[] = [];
const command = `SCAN/${cursor}/MATCH/rs:fs:v1:*:meta/COUNT/10`; // fewer keys
const rsp = (await (await this._fetchCommand("GET", command)).json())[
"SCAN"
];
// console.debug(rsp);
cursor = rsp[0];
for (const fullKeyWithMeta of rsp[1]) {
const realKey = getOrigPath(fullKeyWithMeta, this.remoteBaseDir);
res.push(await this.stat(realKey));
}
// no need to loop over cursor
// console.debug(`walk res:`);
// console.debug(res);
return res;
}
async stat(key: string): Promise<Entity> {
const fullKey = getWebdisPath(key, this.remoteBaseDir);
return await this._statFromRaw(fullKey);
}
async _statFromRaw(key: string): Promise<Entity> {
// console.debug(`_statFromRaw on ${key}`);
const command = `HGETALL/${key}:meta`;
const rsp = (await (await this._fetchCommand("GET", command)).json())[
"HGETALL"
];
// console.debug(`rsp: ${JSON.stringify(rsp, null, 2)}`);
if (isEqual(rsp, {})) {
// empty!
throw Error(`key ${key} doesn't exist!`);
}
const realKey = getOrigPath(key, this.remoteBaseDir);
return {
key: realKey,
keyRaw: realKey,
mtimeCli: Number.parseInt(rsp["mtime"]),
mtimeSvr: Number.parseInt(rsp["mtime"]),
size: Number.parseInt(rsp["size"]),
sizeRaw: Number.parseInt(rsp["size"]),
};
}
async mkdir(key: string, mtime?: number, ctime?: number): Promise<Entity> {
let command = `HSET/${getWebdisPath(key, this.remoteBaseDir)}:meta/size/0`;
if (mtime !== undefined && mtime !== 0) {
command = `${command}/mtime/${mtime}`;
}
if (ctime !== undefined && ctime !== 0) {
command = `${command}/ctime/${ctime}`;
}
const rsp = (await (await this._fetchCommand("GET", command)).json())[
"HSET"
];
return await this.stat(key);
}
async writeFile(
key: string,
content: ArrayBuffer,
mtime: number,
ctime: number
): Promise<Entity> {
const fullKey = getWebdisPath(key, this.remoteBaseDir);
// meta
let command1 = `HSET/${fullKey}:meta/size/${content.byteLength}`;
if (mtime !== undefined && mtime !== 0) {
command1 = `${command1}/mtime/${mtime}`;
}
if (ctime !== undefined && ctime !== 0) {
command1 = `${command1}/ctime/${ctime}`;
}
const rsp1 = (await (await this._fetchCommand("GET", command1)).json())[
"HSET"
];
// content
const command2 = `SET/${fullKey}:content`;
const rsp2 = (
await (await this._fetchCommand("PUT", command2, content)).json()
)["SET"];
// fetch meta
return await this.stat(key);
}
async readFile(key: string): Promise<ArrayBuffer> {
const fullKey = getWebdisPath(key, this.remoteBaseDir);
const command = `GET/${fullKey}:content?type=${DEFAULT_CONTENT_TYPE}`;
const rsp = await (await this._fetchCommand("GET", command)).arrayBuffer();
return rsp;
}
async rename(key1: string, key2: string): Promise<void> {
const fullKey1 = getWebdisPath(key1, this.remoteBaseDir);
const fullKey2 = getWebdisPath(key2, this.remoteBaseDir);
const commandContent = `RENAME/${fullKey1}:content/${fullKey2}:content`;
await this._fetchCommand("POST", commandContent);
const commandMeta = `RENAME/${fullKey1}:meta/${fullKey2}:meta`;
await this._fetchCommand("POST", commandMeta);
}
async rm(key: string): Promise<void> {
const fullKey = getWebdisPath(key, this.remoteBaseDir);
const command = `DEL/${fullKey}:meta/${fullKey}:content`;
const rsp = (await (await this._fetchCommand("PUT", command)).json())[
"DEL"
];
}
async checkConnect(callbackFunc?: any): Promise<boolean> {
try {
const k = await (
await this._fetchCommand("GET", "PING/helloworld")
).json();
return isEqual(k, { PING: "helloworld" });
} catch (err: any) {
console.error(err);
callbackFunc?.(err);
return false;
}
}
async getUserDisplayName(): Promise<string> {
return this.webdisConfig.username || "<no usernme>";
}
async revokeAuth(): Promise<any> {
throw new Error("Method not implemented.");
}
allowEmptyFile(): boolean {
return true;
}
}

View File

@ -1,7 +1,11 @@
import merge from "lodash/merge";
import Mustache from "mustache";
import { moment } from "obsidian";
import { LANGS } from "./langs";
import { LANGS as LANGS_PRO } from "../pro/src/langs";
import { LANGS as LANGS_BASIC } from "./langs";
const LANGS = merge(LANGS_BASIC, LANGS_PRO);
export type LangType = keyof typeof LANGS;
export type LangTypeAndAuto = LangType | "auto";

View File

@ -1,11 +1,11 @@
import QRCode from "qrcode";
import cloneDeep from "lodash/cloneDeep";
import QRCode from "qrcode";
import {
COMMAND_URI,
UriParams,
RemotelySavePluginSettings,
QRExportType,
type QRExportType,
type RemotelySavePluginSettings,
type UriParams,
} from "./baseTypes";
import { getShrinkedSettings } from "./fsOnedrive";
@ -17,14 +17,27 @@ export const exportQrCodeUri = async (
) => {
let settings2: Partial<RemotelySavePluginSettings> = {};
if (exportFields === "all_but_oauth2") {
if (exportFields === "basic_and_advanced") {
settings2 = cloneDeep(settings);
delete settings2.s3;
delete settings2.dropbox;
delete settings2.onedrive;
delete settings2.webdav;
delete settings2.webdis;
delete settings2.googledrive;
delete settings2.pro;
} else if (exportFields === "s3") {
settings2 = { s3: cloneDeep(settings.s3) };
} else if (exportFields === "dropbox") {
settings2 = { dropbox: cloneDeep(settings.dropbox) };
} else if (exportFields === "onedrive") {
settings2 = { onedrive: getShrinkedSettings(settings.onedrive) };
} else if (exportFields === "webdav") {
settings2 = { webdav: cloneDeep(settings.webdav) };
} else if (exportFields === "webdis") {
settings2 = { webdis: cloneDeep(settings.webdis) };
} else if (exportFields === "googledrive") {
settings2 = { googledrive: cloneDeep(settings.googledrive) };
}
delete settings2.vaultRandomID;
@ -64,7 +77,7 @@ export const importQrCodeUri = (
inputParams: any,
currentVaultName: string
): ProcessQrCodeResultType => {
let params = inputParams as UriParams;
const params = inputParams as UriParams;
if (
params.func === undefined ||
params.func !== "settings" ||

View File

@ -9,9 +9,9 @@
"syncrun_syncingribbon": "{{pluginName}}: syncing from {{triggerSource}}",
"syncrun_step0": "0/8 Remotely Save is running in dry mode, thus not actual file changes would happen.",
"syncrun_step1": "1/8 Remotely Save is preparing ({{serviceType}})",
"syncrun_step2": "2/8 Starting to fetch remote meta data.",
"syncrun_step3": "3/8 Checking password correct or not.",
"syncrun_step2": "2/8 Starting to fetch remote meta data and check password.",
"syncrun_passworderr": "Something goes wrong while checking password.",
"syncrun_step3": "3/8 Starting to process remote data.",
"syncrun_step4": "4/8 Starting to fetch local meta data.",
"syncrun_step5": "5/8 Starting to fetch local prev sync data.",
"syncrun_step6": "6/8 Starting to generate sync plan.",
@ -38,6 +38,7 @@
"protocol_onedrive_connect_unknown": "Do not know how to deal with the callback: {{params}}",
"command_startsync": "start sync",
"command_drynrun": "start sync (dry run only)",
"command_exportsyncplans_1_only_change": "export sync plans (latest 1) (change part)",
"command_exportsyncplans_1": "export sync plans (latest 1)",
"command_exportsyncplans_5": "export sync plans (latest 5)",
"command_exportsyncplans_all": "export sync plans (all)",
@ -50,9 +51,10 @@
"statusbar_time_days": "Synced {{time}} days ago",
"statusbar_time_hours": "Synced {{time}} hours ago",
"statusbar_time_minutes": "Synced {{time}} minutes ago",
"statusbar_time_lessminute": "Synced less than a minute ago",
"statusbar_time_lessminute": "Synced last minute ago",
"statusbar_lastsync": "Synced {{time}} ago",
"statusbar_syncing": "Syncing...",
"statusbar_failed": "Last sync failed",
"statusbar_now": "Synced just now",
"statusbar_lastsync_label": "Last successful Sync on {{date}}",
"statusbar_lastsync_never": "Never Synced",
@ -114,9 +116,9 @@
"modal_sizesconflict_copynotice": "All the sizes conflicts info have been copied to the clipboard!",
"settings_basic": "Basic Settings",
"settings_password": "Encryption Password",
"settings_password_desc": "Password for E2E encryption. Empty for no password. You need to click \"Confirm\". Attention: The password and other info are saved locally. After changing the password, you need to manually delete every original files in the remote, and re-sync (so that upload) the encrypted files again.",
"settings_password_desc": "Password for E2E encryption. Empty for no password. You need to click \"Confirm\". Attention: The password and other info are saved locally. After changing the password, you need to manually delete all files from the remote location and re-sync to upload the encrypted files again.",
"settings_encryptionmethod": "Encryption Method",
"settings_encryptionmethod_desc": "Encryption method for E2E encryption. RClone Crypt format is recommended but it doesn't encrypt path structure. OpenSSL enc is the legacy format of this plugin. <b>Both are not affliated with official RClone and OpenSSL product or community.</b> Attention: After switching the method, you need to manually delete every original files in the remote and re-sync (so that upload) the encrypted files again. More info in the <a href='https://github.com/remotely-save/remotely-save/tree/master/docs/encryption'>online doc</a>.",
"settings_encryptionmethod_desc": "Encryption method for E2E encryption. The RClone Crypt format is recommended, although it does not encrypt the path structure. OpenSSL enc is the legacy format of this plugin. <b>Both are not affliated with the official RClone and OpenSSL products or communities.</b> Attention: After switching encryption methods, you need to manually delete all original files from the remote location and re-sync to upload the encrypted files again. More info is available in the <a href='https://github.com/remotely-save/remotely-save/tree/master/docs/encryption'>online doc</a>.",
"settings_encryptionmethod_rclone": "RClone Crypt (recommended)",
"settings_encryptionmethod_openssl": "OpenSSL enc (legacy)",
@ -128,7 +130,7 @@
"settings_autorun_10min": "every 10 minutes",
"settings_autorun_30min": "every 30 minutes",
"settings_runoncestartup": "Run Once On Start Up Automatically",
"settings_runoncestartup_desc": "This settings allows setting running ONCE on start up automatically. This will take effect on NEXT start up after changing. This setting, is different from \"schedule for auto run\" which starts syncing after EVERY interval.",
"settings_runoncestartup_desc": "This setting causes the sync to run once automatically at startup. Changes will take effect on the NEXT startup. This setting is different from the \"schedule for auto run\" setting, which initiates syncing after every specified interval.",
"settings_runoncestartup_notset": "(not set)",
"settings_runoncestartup_1sec": "sync once after 1 second of start up",
"settings_runoncestartup_10sec": "sync once after 10 seconds of start up",
@ -218,8 +220,12 @@
"settings_onedrive_auth": "Auth",
"settings_onedrive_auth_desc": "Auth.",
"settings_onedrive_auth_button": "Auth",
"settings_onedrive_connect_succ": "Great! We can connect to Onedrive!",
"settings_onedrive_connect_fail": "We cannot connect to Onedrive.",
"settings_onedrive_connect_succ": "Great! We can connect to OneDrive!",
"settings_onedrive_connect_fail": "We cannot connect to OneDrive.",
"settings_onedrive_emptyfile": "Empty File Handling",
"settings_onedrive_emptyfile_desc": "OneDrive doesn't allow uploading empty file (even in its official website). Do you want to show up errors or silently skip the empty files?",
"settings_onedrive_emptyfile_skip": "Skip",
"settings_onedrive_emptyfile_error": "Error and abort",
"settings_webdav": "Remote For Webdav",
"settings_webdav_disclaimer1": "Disclaimer: The information is stored locally. Other malicious/harmful/faulty plugins may read the info. If you see any unintentional access to your webdav server, please immediately change the username and password.",
"settings_webdav_cors_os": "Obsidian desktop>=0.13.25 or iOS>=1.1.1 or Android>=1.2.1 supports bypassing CORS locally. But you are using an old version, and you're suggested to upgrade Obsidian.",
@ -232,7 +238,7 @@
"settings_webdav_password": "Password",
"settings_webdav_password_desc": "Password. Attention: the password and other info are saved locally.",
"settings_webdav_auth": "Auth Type",
"settings_webdav_auth_desc": "If no password, this option would be ignored.",
"settings_webdav_auth_desc": "If no password is provided, this option will be ignored.",
"settings_webdav_depth": "Depth Header Sent To Servers",
"settings_webdav_depth_desc": "Webdav servers should be configured to allow requests with header Depth being '1' or 'Infinity'. If you are not sure what's this, choose \"depth='1'\". If you are sure your server supports depth='infinity', please choose that to get way better performance.",
"settings_webdav_depth_1": "only supports depth='1'",
@ -240,12 +246,25 @@
"settings_webdav_connect_succ": "Great! The webdav server can be accessed.",
"settings_webdav_connect_fail": "The webdav server cannot be reached (possible to be any of address/username/password/authtype errors).",
"settings_webdav_connect_fail_withcors": "The webdav server cannot be reached (possible to be any of address/username/password/authtype/CORS errors).",
"settings_webdis": "Remote For Webdis",
"settings_webdis_disclaimer1": "Disclaimer: This app is NOT an official Redis® Ltd / Redis® OSS / Webdis product. Redis is a registered trademark of Redis Ltd.",
"settings_webdis_disclaimer2": "Disclaimer: The information is stored locally. Other malicious/harmful/faulty plugins may read the info. If you see any unintentional access to your Webdis server, please immediately change the username and password.",
"settings_webdis_folder": "We will store the value with keys prefixed by :{{remoteBaseDir}} on your server.",
"settings_webdis_addr": "Server Address",
"settings_webdis_addr_desc": "Server address.",
"settings_webdis_user": "Username",
"settings_webdis_user_desc": "Username. Attention: the username and other info are saved locally.",
"settings_webdis_password": "Password",
"settings_webdis_password_desc": "Password. Attention: the password and other info are saved locally.",
"settings_webdis_connect_succ": "Great! The Webdis server can be accessed.",
"settings_webdis_connect_fail": "The Webdis server cannot be reached (possible to be any of address/username/password errors).",
"settings_chooseservice": "Choose A Remote Service",
"settings_chooseservice_desc": "Start here. What service are you connecting to? S3, Dropbox, Webdav, or OneDrive for personal?",
"settings_chooseservice_desc": "Start here. What service are you connecting to? S3, Dropbox, Webdav, OneDrive for personal, or Webdis?",
"settings_chooseservice_s3": "S3 or compatible",
"settings_chooseservice_dropbox": "Dropbox",
"settings_chooseservice_webdav": "Webdav",
"settings_chooseservice_onedrive": "OneDrive for personal",
"settings_chooseservice_webdis": "Webdis (HTTP for Redis®)",
"settings_adv": "Advanced Settings",
"settings_concurrency": "Concurrency",
"settings_concurrency_desc": "How many files do you want to download or upload in parallel at most? By default it's set to 5. If you meet any problems such as rate limit, you can reduce the concurrency to a lower value.",
@ -258,7 +277,7 @@
"settings_deletetowhere_system_trash": "system trash (default)",
"settings_deletetowhere_obsidian_trash": "Obsidian .trash folder",
"settings_conflictaction": "Action For Conflict",
"settings_conflictaction_desc": "If a file is created or modified on both side since last update, it's a conflict event. How to deal with it? This only works for bidirectional sync.",
"settings_conflictaction_desc": "<p>If a file is created or modified on both side since last update, it's a conflict event. How to deal with it? This only works for bidirectional sync.</p>",
"settings_conflictaction_keep_newer": "newer version survives (default)",
"settings_conflictaction_keep_larger": "larger size version survives",
"settings_cleanemptyfolder": "Action For Empty Folders",
@ -270,6 +289,9 @@
"settings_protectmodifypercentage_000_desc": "0 (always block)",
"settings_protectmodifypercentage_050_desc": "50 (default)",
"settings_protectmodifypercentage_100_desc": "100 (disable the protection)",
"settings_protectmodifypercentage_custom_desc": "custom",
"settings_protectmodifypercentage_customfield": "Custom Abort Sync If Modification Above Percentage",
"settings_protectmodifypercentage_customfield_desc": "You need to enter a number between 0 (inclusive) and 100 (inclusive). Float number is also allowed.",
"setting_syncdirection": "Sync Direction",
"setting_syncdirection_desc": "Which direction should the plugin sync to? Please be aware that only CHANGED files (based on time and size) are synced regardless any option.",
"setting_syncdirection_bidirectional_desc": "Bidirectional (default)",
@ -280,9 +302,12 @@
"settings_importexport": "Import and Export Partial Settings",
"settings_export": "Export",
"settings_export_desc": "Export settings by generating a QR code or URI.",
"settings_export_all_but_oauth2_button": "Export Non-Oauth2 Part",
"settings_export_basic_and_advanced_button": "Export Basic And Advanced Part",
"settings_export_s3_button": "Export S3 Part",
"settings_export_dropbox_button": "Export Dropbox Part",
"settings_export_onedrive_button": "Export OneDrive Part",
"settings_export_webdav_button": "Export Webdav Part",
"settings_export_webdis_button": "Export Webdis Part",
"settings_import": "Import",
"settings_import_desc": "Paste the exported URI into here and click \"Import\". Or, you can open a camera or scan-qrcode app to scan the QR code.",
"settings_import_button": "Import",
@ -300,6 +325,7 @@
"settings_viewconsolelog_desc": "On desktop, please press \"ctrl+shift+i\" or \"cmd+shift+i\" to view the log. On mobile, please install the third-party plugin <a href='https://obsidian.md/plugins?search=Logstravaganza'>Logstravaganza</a> to export the console log to a note.",
"settings_syncplans": "Export Sync Plans",
"settings_syncplans_desc": "Sync plans are created every time after you trigger sync and before the actual sync. Useful to know what would actually happen in those sync. Click the button to export sync plans.",
"settings_syncplans_button_1_only_change": "Export latest 1 (change part)",
"settings_syncplans_button_1": "Export latest 1",
"settings_syncplans_button_5": "Export latest 5",
"settings_syncplans_button_all": "Export All",
@ -316,6 +342,10 @@
"settings_profiler_results_desc": "The plugin records the time cost of each steps. Here you can export them to know which step is slow.",
"settings_profiler_results_notice": "Profiler results exported.",
"settings_profiler_results_button_all": "Export All",
"settings_profiler_enabledebugprint": "Enable Profiler Printing",
"settings_profiler_enabledebugprint_desc": "Print profiler result in each insertion to console or not?",
"settings_profiler_recordsize": "Enable Profiler Recording Size",
"settings_profiler_recordsize_desc": "Let profiler record object sizes or not?",
"settings_outputbasepathvaultid": "Output Vault Base Path And Randomly Assigned ID",
"settings_outputbasepathvaultid_desc": "For debugging purposes.",
"settings_outputbasepathvaultid_button": "Output",
@ -329,5 +359,7 @@
"syncalgov3_checkbox_manual_backup": "I will backup my vault manually firstly.",
"syncalgov3_checkbox_requiremultidevupdate": "I understand I need to update the plugin ACROSS ALL DEVICES to make them work properly.",
"syncalgov3_button_agree": "Agree",
"syncalgov3_button_disagree": "Do Not Agree"
"syncalgov3_button_disagree": "Do Not Agree",
"menu_check_file_stat": "Check file stats"
}

View File

@ -9,9 +9,9 @@
"syncrun_syncingribbon": "{{pluginName}}:正在由 {{triggerSource}} 触发运行",
"syncrun_step0": "0/8 Remotely Save 在空跑dry run模式不会发生实际的文件交换。",
"syncrun_step1": "1/8 Remotely Save 准备同步({{serviceType}}",
"syncrun_step2": "2/8 正在获取远端的元数据。",
"syncrun_step3": "3/8 正在检查密码正确与否。",
"syncrun_step2": "2/8 正在获取远端的元数据和检查密码。",
"syncrun_passworderr": "检查密码时候出错。",
"syncrun_step3": "3/8 正在处理远端的元数据。",
"syncrun_step4": "4/8 正在获取本地的元数据。",
"syncrun_step5": "5/8 正在获取本地上一次同步的元数据。",
"syncrun_step6": "6/8 正在生成同步计划。",
@ -39,6 +39,7 @@
"command_startsync": "开始同步start sync",
"command_drynrun": "开始同步空跑模式start sync (dry run only)",
"command_exportsyncplans_json": "导出同步计划为 json 格式export sync plans in json format",
"command_exportsyncplans_1_only_change": "导出同步计划(最近 1 次仅修改部分export sync plans (latest 1) (change part)",
"command_exportsyncplans_1": "导出同步计划(最近 1 次export sync plans (latest 1)",
"command_exportsyncplans_5": "导出同步计划(最近 5 次export sync plans (latest 5)",
"command_exportsyncplans_all": "导出同步计划所有export sync plans (all)",
@ -53,6 +54,7 @@
"statusbar_time_lessminute": "一分钟之内同步",
"statusbar_lastsync": "上一次同步于:{{time}}",
"statusbar_syncing": "正在同步",
"statusbar_failed": "上次同步失败了",
"statusbar_now": "刚同步完",
"statusbar_lastsync_label": "上一次同步于:{{date}}",
"statusbar_lastsync_never": "没触发过同步",
@ -219,6 +221,10 @@
"settings_onedrive_auth_button": "鉴权",
"settings_onedrive_connect_succ": "很好!我们可连接上 OneDrive",
"settings_onedrive_connect_fail": "我们未能连接上 OneDrive。",
"settings_onedrive_emptyfile": "空文件处理",
"settings_onedrive_emptyfile_desc": "OneDrive 不允许上传空文件(即使官网也是不允许的)。那么你想跳过空文件还是返回错误?",
"settings_onedrive_emptyfile_skip": "跳过",
"settings_onedrive_emptyfile_error": "返回错误和中断",
"settings_webdav": "Webdav 设置",
"settings_webdav_disclaimer1": "声明:您所输入的信息存储于本地。其它有害的或者出错的插件,是有可能读取到这些信息的。如果您发现了 Webdav 服务器有不符合预期的访问,请立刻修改用户名和密码。",
"settings_webdav_cors_os": "Obsidian 桌面版>=0.13.25 或 iOS>=1.1.1 或 Android>=1.2.1 支持跳过 CORS 设置。但您正在使用旧版,建议升级。",
@ -239,12 +245,25 @@
"settings_webdav_connect_succ": "很好!可以连接上 Webdav 服务器。",
"settings_webdav_connect_fail": "无法连接上 Webdav 服务器。(可能是地址/账号/密码/鉴权类型等错误。)",
"settings_webdav_connect_fail_withcors": "无法连接上 Webdav 服务器。(可能是地址/账号/密码/鉴权类型/CORS 等错误。)",
"settings_webdis": "Webdis 设置",
"settings_webdis_disclaimer1": "声明:此插件不是 Redis® Ltd 或 Redis® 软件或 Wedis 的官方产品。Redis 是 Redis Ltd 的注册商标。",
"settings_webdis_disclaimer2": "声明:您所输入的信息存储于本地。其它有害的或者出错的插件,是有可能读取到这些信息的。如果您发现了 Webdis 服务器有不符合预期的访问,请立刻修改用户名和密码。",
"settings_webdis_folder": "我们会在您的服务器上创建带有此前缀的 key 并在里面同步::{{remoteBaseDir}}。",
"settings_webdis_addr": "服务器地址",
"settings_webdis_addr_desc": "服务器地址",
"settings_webdis_user": "用户名",
"settings_webdis_user_desc": "用户名。注意:用户名和其它信息都会保存在本地。",
"settings_webdis_password": "密码",
"settings_webdis_password_desc": "密码。注意:密码和其它信息都会保存在本地。",
"settings_webdis_connect_succ": "很好!可以连接上 Webdis 服务器。",
"settings_webdis_connect_fail": "无法连接上 Webdis 服务器。(可能是地址/账号/密码/鉴权类型等错误。)",
"settings_chooseservice": "选择远程服务",
"settings_chooseservice_desc": "从这里开始设置。您想连接到哪一个服务S3、Dropbox、Webdav、OneDrive个人版",
"settings_chooseservice_desc": "从这里开始设置。您想连接到哪一个服务S3、Dropbox、Webdav、OneDrive个人版、Webdis",
"settings_chooseservice_s3": "S3 或兼容 S3 的服务",
"settings_chooseservice_dropbox": "Dropbox",
"settings_chooseservice_webdav": "Webdav",
"settings_chooseservice_onedrive": "OneDrive个人版",
"settings_chooseservice_webdis": "Webdis (an HTTP interface for Redis)",
"settings_adv": "进阶设置",
"settings_concurrency": "并行度",
"settings_concurrency_desc": "您希望同时最多有多少个文件被上传和下载?默认值是 5。如果您遇到了一些问题如访问频率限制您可以减少并行度。",
@ -269,6 +288,9 @@
"settings_protectmodifypercentage_000_desc": "0总是强制中止",
"settings_protectmodifypercentage_050_desc": "50默认值",
"settings_protectmodifypercentage_100_desc": "100去除此保护",
"settings_protectmodifypercentage_custom_desc": "自定义",
"settings_protectmodifypercentage_customfield": "如果修改超过自定义百分比则中止同步",
"settings_protectmodifypercentage_customfield_desc": "您需要输入 0~ 100的数字。小数也是可以的。",
"setting_syncdirection": "同步方向",
"setting_syncdirection_desc": "插件应该向哪里同步?注意每个选项都是只有修改了的文件(基于修改时间和大小判断)才会触发同步动作。",
"setting_syncdirection_bidirectional_desc": "双向同步(默认)",
@ -279,9 +301,12 @@
"settings_importexport": "导入导出部分设置",
"settings_export": "导出",
"settings_export_desc": "用 QR 码或 URI 导出设置信息。",
"settings_export_all_but_oauth2_button": "导出非 Oauth2 部分",
"settings_export_basic_and_advanced_button": "导出基本或进阶设置",
"settings_export_s3_button": "导出 S3 部分",
"settings_export_dropbox_button": "导出 Dropbox 部分",
"settings_export_onedrive_button": "导出 OneDrive 部分",
"settings_export_webdav_button": "导出 Webdav 部分",
"settings_export_webdis_button": "导出 Webdis 部分",
"settings_import": "导入",
"settings_import_desc": "粘贴之前导出的 URI 到这里然后点击“导入”。或,使用拍摄 app 或者扫描 QR 码的 app来扫描对应的 QR 码。",
"settings_import_button": "导入",
@ -299,6 +324,7 @@
"settings_viewconsolelog_desc": "电脑上输入“ctrl+shift+i”或“cmd+shift+i”来查看终端输出。手机上安装第三方插件 <a href='https://obsidian.md/plugins?search=Logstravaganza'>Logstravaganza</a> 来导出终端输出到一篇笔记上。",
"settings_syncplans": "导出同步计划",
"settings_syncplans_desc": "每次您启动同步,并在实际上传下载前,插件会生成同步计划。它可以使您知道每次同步发生了什么。点击按钮可以导出同步计划。",
"settings_syncplans_button_1_only_change": "导出最近 1 次(仅修改部分)",
"settings_syncplans_button_1": "导出最近 1 次",
"settings_syncplans_button_5": "导出最近 5 次",
"settings_syncplans_button_all": "导出所有",
@ -328,5 +354,7 @@
"syncalgov3_checkbox_manual_backup": "我将会首先手动备份我的库Vault。",
"syncalgov3_checkbox_requiremultidevupdate": "我理解,我需要在所有设备上都更新此插件使之正常运行。",
"syncalgov3_button_agree": "同意",
"syncalgov3_button_disagree": "不同意"
"syncalgov3_button_disagree": "不同意",
"menu_check_file_stat": "查看文件属性"
}

View File

@ -9,9 +9,9 @@
"syncrun_syncingribbon": "{{pluginName}}:正在由 {{triggerSource}} 觸發執行",
"syncrun_step0": "0/8 Remotely Save 在空跑dry run模式不會發生實際的檔案交換。",
"syncrun_step1": "1/8 Remotely Save 準備同步({{serviceType}}",
"syncrun_step2": "2/8 正在獲取遠端的元資料。",
"syncrun_step3": "3/8 正在檢查密碼正確與否。",
"syncrun_step2": "2/8 正在獲取遠端的元數據和檢查密碼。",
"syncrun_passworderr": "檢查密碼時候出錯。",
"syncrun_step3": "3/8 正在處理遠端的元數據。",
"syncrun_step4": "4/8 正在獲取本地的元資料。",
"syncrun_step5": "5/8 正在獲取本地上一次同步的元資料。",
"syncrun_step6": "6/8 正在生成同步計劃。",
@ -38,6 +38,7 @@
"protocol_onedrive_connect_unknown": "不知道如何處理此 callback{{params}}",
"command_startsync": "開始同步start sync",
"command_drynrun": "開始同步空跑模式start sync (dry run only)",
"command_exportsyncplans_1_only_change": "匯出同步計劃(最近 1 次僅修改部分export sync plans (latest 1) (change part)",
"command_exportsyncplans_1": "匯出同步計劃(最近 1 次export sync plans (latest 1)",
"command_exportsyncplans_5": "匯出同步計劃(最近 5 次export sync plans (latest 5)",
"command_exportsyncplans_all": "匯出同步計劃所有export sync plans (all)",
@ -52,6 +53,7 @@
"statusbar_time_lessminute": "一分鐘之內同步",
"statusbar_lastsync": "上一次同步於:{{time}}",
"statusbar_syncing": "正在同步",
"statusbar_failed": "上次同步失敗了",
"statusbar_now": "剛同步完",
"statusbar_lastsync_label": "上一次同步於:{{date}}",
"statusbar_lastsync_never": "沒觸發過同步",
@ -218,6 +220,10 @@
"settings_onedrive_auth_button": "鑑權",
"settings_onedrive_connect_succ": "很好!我們可連線上 OneDrive",
"settings_onedrive_connect_fail": "我們未能連線上 OneDrive。",
"settings_onedrive_emptyfile": "空檔案處理",
"settings_onedrive_emptyfile_desc": "OneDrive 不允許上傳空檔案(即使官網也是不允許的)。那麼你想跳過空檔案還是返回錯誤?",
"settings_onedrive_emptyfile_skip": "跳過",
"settings_onedrive_emptyfile_error": "返回錯誤和中斷",
"settings_webdav": "Webdav 設定",
"settings_webdav_disclaimer1": "宣告:您所輸入的資訊儲存於本地。其它有害的或者出錯的外掛,是有可能讀取到這些資訊的。如果您發現了 Webdav 伺服器有不符合預期的訪問,請立刻修改使用者名稱和密碼。",
"settings_webdav_cors_os": "Obsidian 桌面版>=0.13.25 或 iOS>=1.1.1 或 Android>=1.1.1 支援跳過 CORS 設定。但您正在使用舊版,建議升級。",
@ -238,12 +244,25 @@
"settings_webdav_connect_succ": "很好!可以連線上 Webdav 伺服器。",
"settings_webdav_connect_fail": "無法連線上 Webdav 伺服器。(可能是地址/賬號/密碼/鑑權型別等錯誤。)",
"settings_webdav_connect_fail_withcors": "無法連線上 Webdav 伺服器。(可能是地址/賬號/密碼/鑑權型別/CORS 等錯誤。)",
"settings_webdis": "Webdis 設置",
"settings_webdis_disclaimer1": "聲明:此插件不是 Redis® Ltd 或 Redis® 軟件或 Wedis 的官方產品。Redis 是 Redis Ltd 的註冊商標。",
"settings_webdis_disclaimer2": "聲明:您所輸入的信息存儲於本地。其它有害的或者出錯的插件,是有可能讀取到這些信息的。如果您發現了 Webdis 服務器有不符合預期的訪問,請立刻修改用戶名和密碼。",
"settings_webdis_folder": "我們會在您的服務器上創建帶有此前綴的 key 並在裡面同步::{{remoteBaseDir}}。",
"settings_webdis_addr": "服務器地址",
"settings_webdis_addr_desc": "服務器地址",
"settings_webdis_user": "用戶名",
"settings_webdis_user_desc": "用戶名。注意:用戶名和其它信息都會保存在本地。",
"settings_webdis_password": "密碼",
"settings_webdis_password_desc": "密碼。注意:密碼和其它信息都會保存在本地。",
"settings_webdis_connect_succ": "很好!可以連接上 Webdis 服務器。",
"settings_webdis_connect_fail": "無法連接上 Webdis 服務器。(可能是地址/賬號/密碼/鑑權類型等錯誤。)",
"settings_chooseservice": "選擇遠端服務",
"settings_chooseservice_desc": "從這裡開始設定。您想連線到哪一個服務S3、Dropbox、Webdav、OneDrive個人版",
"settings_chooseservice_desc": "從這裡開始設定。您想連線到哪一個服務S3、Dropbox、Webdav、OneDrive個人版、Webdis",
"settings_chooseservice_s3": "S3 或相容 S3 的服務",
"settings_chooseservice_dropbox": "Dropbox",
"settings_chooseservice_webdav": "Webdav",
"settings_chooseservice_onedrive": "OneDrive個人版",
"settings_chooseservice_webdis": "Webdis (an HTTP interface for Redis®)",
"settings_adv": "進階設定",
"settings_concurrency": "並行度",
"settings_concurrency_desc": "您希望同時最多有多少個檔案被上傳和下載?預設值是 5。如果您遇到了一些問題如訪問頻率限制您可以減少並行度。",
@ -268,6 +287,9 @@
"settings_protectmodifypercentage_000_desc": "0總是強制中止",
"settings_protectmodifypercentage_050_desc": "50預設值",
"settings_protectmodifypercentage_100_desc": "100去除此保護",
"settings_protectmodifypercentage_custom_desc": "自定義",
"settings_protectmodifypercentage_customfield": "如果修改超過自定義百分比則中止同步",
"settings_protectmodifypercentage_customfield_desc": "您需要輸入 0~ 100的數字。小數也是可以的。",
"setting_syncdirection": "同步方向",
"setting_syncdirection_desc": "外掛應該向哪裡同步?注意每個選項都是隻有修改了的檔案(基於修改時間和大小判斷)才會觸發同步動作。",
"setting_syncdirection_bidirectional_desc": "雙向同步(預設)",
@ -278,9 +300,12 @@
"settings_importexport": "匯入匯出部分設定",
"settings_export": "匯出",
"settings_export_desc": "用 QR 碼或 URI 匯出設定資訊。",
"settings_export_all_but_oauth2_button": "匯出非 Oauth2 部分",
"settings_export_basic_and_advanced_button": "匯出基本或進階設定",
"settings_export_s3_button": "匯出 S3 部分",
"settings_export_dropbox_button": "匯出 Dropbox 部分",
"settings_export_onedrive_button": "匯出 OneDrive 部分",
"settings_export_webdav_button": "匯出 Webdav 部分",
"settings_export_webdis_button": "匯出 Webdis 部分",
"settings_import": "匯入",
"settings_import_desc": "貼上之前匯出的 URI 到這裡然後點選“匯入”。或,使用拍攝 app 或者掃描 QR 碼的 app來掃描對應的 QR 碼。",
"settings_import_button": "匯入",
@ -298,6 +323,7 @@
"settings_viewconsolelog_desc": "電腦上輸入“ctrl+shift+i”或“cmd+shift+i”來檢視終端輸出。手機上安裝第三方外掛 <a href='https://obsidian.md/plugins?search=Logstravaganza'>Logstravaganza</a> 來匯出終端輸出到一篇筆記上。",
"settings_syncplans": "匯出同步計劃",
"settings_syncplans_desc": "每次您啟動同步,並在實際上傳下載前,外掛會生成同步計劃。它可以使您知道每次同步發生了什麼。點選按鈕可以匯出同步計劃。",
"settings_syncplans_button_1_only_change": "匯出最近 1 次(僅修改部分)",
"settings_syncplans_button_1": "匯出最近 1 次",
"settings_syncplans_button_5": "匯出最近 5 次",
"settings_syncplans_button_all": "匯出所有",
@ -327,5 +353,7 @@
"syncalgov3_checkbox_manual_backup": "我將會首先手動備份我的庫Vault。",
"syncalgov3_checkbox_requiremultidevupdate": "我理解,我需要在所有裝置上都更新此外掛使之正常執行。",
"syncalgov3_button_agree": "同意",
"syncalgov3_button_disagree": "不同意"
"syncalgov3_button_disagree": "不同意",
"menu_check_file_stat": "檢視檔案屬性"
}

View File

@ -1,12 +1,14 @@
import localforage from "localforage";
import { extendPrototype } from "localforage-getitems";
extendPrototype(localforage);
import { extendPrototype as ep1 } from "localforage-getitems";
import { extendPrototype as ep2 } from "localforage-removeitems";
ep1(localforage);
ep2(localforage);
export type LocalForage = typeof localforage;
import { nanoid } from "nanoid";
import type { Entity, MixedEntity, SUPPORTED_SERVICES_TYPE } from "./baseTypes";
import type { SyncPlanType } from "./sync";
import type { Entity, SUPPORTED_SERVICES_TYPE } from "./baseTypes";
import { unixTimeToStr } from "./misc";
import type { SyncPlanType } from "./sync";
const DB_VERSION_NUMBER_IN_HISTORY = [20211114, 20220108, 20220326, 20240220];
export const DEFAULT_DB_VERSION_NUMBER: number = 20240220;
@ -18,6 +20,7 @@ export const DEFAULT_TBL_LOGGER_OUTPUT = "loggeroutput";
export const DEFAULT_TBL_SIMPLE_KV_FOR_MISC = "simplekvformisc";
export const DEFAULT_TBL_PREV_SYNC_RECORDS = "prevsyncrecords";
export const DEFAULT_TBL_PROFILER_RESULTS = "profilerresults";
export const DEFAULT_TBL_FILE_CONTENT_HISTORY = "filecontenthistory";
/**
* @deprecated
@ -60,6 +63,7 @@ export interface InternalDBs {
simpleKVForMiscTbl: LocalForage;
prevSyncRecordsTbl: LocalForage;
profilerResultsTbl: LocalForage;
fileContentHistoryTbl: LocalForage;
/**
* @deprecated
@ -219,6 +223,11 @@ export const prepareDBs = async (
name: DEFAULT_DB_NAME,
storeName: DEFAULT_TBL_SYNC_MAPPING,
}),
fileContentHistoryTbl: localforage.createInstance({
name: DEFAULT_DB_NAME,
storeName: DEFAULT_TBL_FILE_CONTENT_HISTORY,
}),
} as InternalDBs;
// try to get vaultRandomID firstly
@ -307,12 +316,15 @@ export const clearFileHistoryOfEverythingByVault = async (
db: InternalDBs,
vaultRandomID: string
) => {
const keys = await db.fileHistoryTbl.keys();
for (const key of keys) {
if (key.startsWith(`${vaultRandomID}\t`)) {
await db.fileHistoryTbl.removeItem(key);
}
}
const keys = (await db.fileHistoryTbl.keys()).filter((x) =>
x.startsWith(`${vaultRandomID}\t`)
);
await db.fileHistoryTbl.removeItems(keys);
// for (const key of keys) {
// if (key.startsWith(`${vaultRandomID}\t`)) {
// await db.fileHistoryTbl.removeItem(key);
// }
// }
};
/**
@ -339,12 +351,15 @@ export const clearAllSyncMetaMappingByVault = async (
db: InternalDBs,
vaultRandomID: string
) => {
const keys = await db.syncMappingTbl.keys();
for (const key of keys) {
if (key.startsWith(`${vaultRandomID}\t`)) {
await db.syncMappingTbl.removeItem(key);
}
}
const keys = (await db.syncMappingTbl.keys()).filter((x) =>
x.startsWith(`${vaultRandomID}\t`)
);
await db.syncMappingTbl.removeItems(keys);
// for (const key of keys) {
// if (key.startsWith(`${vaultRandomID}\t`)) {
// await db.syncMappingTbl.removeItem(key);
// }
// }
};
export const insertSyncPlanRecordByVault = async (
@ -400,7 +415,7 @@ export const clearExpiredSyncPlanRecords = async (db: InternalDBs) => {
const expiredTs = currTs - MILLISECONDS_OLD;
let records = (await db.syncPlansTbl.keys()).map((key) => {
const ts = parseInt(key.split("\t")[1]);
const ts = Number.parseInt(key.split("\t")[1]);
const expired = ts <= expiredTs;
return {
ts: ts,
@ -422,11 +437,12 @@ export const clearExpiredSyncPlanRecords = async (db: InternalDBs) => {
});
}
const ps = [] as Promise<void>[];
keysToRemove.forEach((element) => {
ps.push(db.syncPlansTbl.removeItem(element));
});
await Promise.all(ps);
// const ps = [] as Promise<void>[];
// keysToRemove.forEach((element) => {
// ps.push(db.syncPlansTbl.removeItem(element));
// });
// await Promise.all(ps);
await db.syncPlansTbl.removeItems(Array.from(keysToRemove));
};
export const getAllPrevSyncRecordsByVaultAndProfile = async (
@ -475,12 +491,10 @@ export const clearAllPrevSyncRecordByVault = async (
db: InternalDBs,
vaultRandomID: string
) => {
const keys = await db.prevSyncRecordsTbl.keys();
for (const key of keys) {
if (key.startsWith(`${vaultRandomID}\t`)) {
await db.prevSyncRecordsTbl.removeItem(key);
}
}
const keys = (await db.prevSyncRecordsTbl.keys()).filter((x) =>
x.startsWith(`${vaultRandomID}\t`)
);
await db.prevSyncRecordsTbl.removeItems(keys);
};
export const clearAllLoggerOutputRecords = async (db: InternalDBs) => {
@ -542,7 +556,7 @@ export const insertProfilerResultByVault = async (
// clear older one while writing
const records = (await db.profilerResultsTbl.keys())
.filter((x) => x.startsWith(`${vaultRandomID}\t`))
.map((x) => parseInt(x.split("\t")[1]));
.map((x) => Number.parseInt(x.split("\t")[1]));
records.sort((a, b) => -(a - b)); // descending
while (records.length > 5) {
const ts = records.pop()!;
@ -559,7 +573,7 @@ export const readAllProfilerResultsByVault = async (
if (key.startsWith(`${vaultRandomID}\t`)) {
records.push({
val: value as string,
ts: parseInt(key.split("\t")[1]),
ts: Number.parseInt(key.split("\t")[1]),
});
}
});

View File

@ -1,72 +1,86 @@
import {
Modal,
Notice,
Plugin,
Setting,
addIcon,
setIcon,
FileSystemAdapter,
Platform,
requireApiVersion,
Events,
} from "obsidian";
import cloneDeep from "lodash/cloneDeep";
import { createElement, RotateCcw, RefreshCcw, FileText } from "lucide";
import { FileText, RefreshCcw, RotateCcw, createElement } from "lucide";
import {
Events,
FileSystemAdapter,
type Modal,
Notice,
Platform,
Plugin,
type Setting,
TFolder,
addIcon,
requireApiVersion,
setIcon,
} from "obsidian";
import {
DEFAULT_PRO_CONFIG,
getAndSaveProEmail,
getAndSaveProFeatures,
sendAuthReq as sendAuthReqPro,
setConfigBySuccessfullAuthInplace as setConfigBySuccessfullAuthInplacePro,
} from "../pro/src/account";
import type {
RemotelySavePluginSettings,
SyncTriggerSourceType,
} from "./baseTypes";
import {
COMMAND_CALLBACK,
COMMAND_CALLBACK_ONEDRIVE,
COMMAND_CALLBACK_DROPBOX,
COMMAND_CALLBACK_ONEDRIVE,
COMMAND_URI,
API_VER_ENSURE_REQURL_OK,
} from "./baseTypes";
import { importQrCodeUri } from "./importExport";
import {
prepareDBs,
InternalDBs,
clearExpiredSyncPlanRecords,
upsertPluginVersionByVault,
clearAllLoggerOutputRecords,
upsertLastSuccessSyncTimeByVault,
getLastSuccessSyncTimeByVault,
} from "./localdb";
import { API_VER_ENSURE_REQURL_OK } from "./baseTypesObs";
import { messyConfigToNormal, normalConfigToMessy } from "./configPersist";
import {
DEFAULT_DROPBOX_CONFIG,
sendAuthReq as sendAuthReqDropbox,
setConfigBySuccessfullAuthInplace as setConfigBySuccessfullAuthInplaceDropbox,
} from "./fsDropbox";
import {
AccessCodeResponseSuccessfulType,
type AccessCodeResponseSuccessfulType,
DEFAULT_ONEDRIVE_CONFIG,
sendAuthReq as sendAuthReqOnedrive,
setConfigBySuccessfullAuthInplace as setConfigBySuccessfullAuthInplaceOnedrive,
} from "./fsOnedrive";
import { DEFAULT_S3_CONFIG } from "./fsS3";
import { DEFAULT_WEBDAV_CONFIG } from "./fsWebdav";
import { RemotelySaveSettingTab } from "./settings";
import { messyConfigToNormal, normalConfigToMessy } from "./configPersist";
import { I18n } from "./i18n";
import type { LangTypeAndAuto, TransItemType } from "./i18n";
import { importQrCodeUri } from "./importExport";
import {
type InternalDBs,
clearAllLoggerOutputRecords,
clearExpiredSyncPlanRecords,
getLastSuccessSyncTimeByVault,
prepareDBs,
upsertLastSuccessSyncTimeByVault,
upsertPluginVersionByVault,
} from "./localdb";
import { RemotelySaveSettingTab } from "./settings";
import { SyncAlgoV3Modal } from "./syncAlgoV3Notice";
// biome-ignore lint/suspicious/noShadowRestrictedNames: intentionally import the aggregate-error package under the same name as the global AggregateError
import AggregateError from "aggregate-error";
import { exportVaultSyncPlansToFiles } from "./debugMode";
import { changeMobileStatusBar } from "./misc";
import { Profiler } from "./profiler";
import { FakeFsLocal } from "./fsLocal";
import { FakeFsEncrypt } from "./fsEncrypt";
import { syncer } from "./sync";
import { getClient } from "./fsGetter";
import throttle from "lodash/throttle";
import { COMMAND_CALLBACK_PRO } from "../pro/src/baseTypesPro";
import { DEFAULT_GOOGLEDRIVE_CONFIG } from "../pro/src/fsGoogleDrive";
import { exportVaultSyncPlansToFiles } from "./debugMode";
import { FakeFsEncrypt } from "./fsEncrypt";
import { getClient } from "./fsGetter";
import { FakeFsLocal } from "./fsLocal";
import { DEFAULT_WEBDIS_CONFIG } from "./fsWebdis";
import { changeMobileStatusBar } from "./misc";
import { DEFAULT_PROFILER_CONFIG, type Profiler } from "./profiler";
import { syncer } from "./sync";
const DEFAULT_SETTINGS: RemotelySavePluginSettings = {
s3: DEFAULT_S3_CONFIG,
webdav: DEFAULT_WEBDAV_CONFIG,
dropbox: DEFAULT_DROPBOX_CONFIG,
onedrive: DEFAULT_ONEDRIVE_CONFIG,
webdis: DEFAULT_WEBDIS_CONFIG,
googledrive: DEFAULT_GOOGLEDRIVE_CONFIG,
password: "",
serviceType: "s3",
currLogLevel: "info",
@ -92,6 +106,8 @@ const DEFAULT_SETTINGS: RemotelySavePluginSettings = {
obfuscateSettingFile: true,
enableMobileStatusBar: false,
encryptionMethod: "unknown",
profiler: DEFAULT_PROFILER_CONFIG,
pro: DEFAULT_PRO_CONFIG,
};
interface OAuth2Info {
@ -147,7 +163,12 @@ export default class RemotelySavePlugin extends Plugin {
appContainerObserver?: MutationObserver;
async syncRun(triggerSource: SyncTriggerSourceType = "manual") {
const profiler = new Profiler();
// const profiler = new Profiler(
// undefined,
// this.settings.profiler?.enablePrinting ?? false,
// this.settings.profiler?.recordSize ?? false
// );
const profiler: Profiler | undefined = undefined;
const fsLocal = new FakeFsLocal(
this.app.vault,
this.settings.syncConfigDir ?? false,
@ -297,7 +318,6 @@ export default class RemotelySavePlugin extends Plugin {
default:
throw Error(`unknown step=${step} for showing notice`);
break;
}
};
@ -328,17 +348,21 @@ export default class RemotelySavePlugin extends Plugin {
// last step
if (this.syncRibbon !== undefined) {
setIcon(this.syncRibbon, iconNameSyncWait);
let originLabel = `${this.manifest.name}`;
const originLabel = `${this.manifest.name}`;
this.syncRibbon.setAttribute("aria-label", originLabel);
}
}
};
const statusBarFunc = async (s: SyncTriggerSourceType, step: number) => {
const statusBarFunc = async (
s: SyncTriggerSourceType,
step: number,
everythingOk: boolean
) => {
if (step === 1) {
// change status to "syncing..." on statusbar
this.updateLastSuccessSyncMsg(-1);
} else if (step === 8) {
} else if (step === 8 && everythingOk) {
const lastSuccessSyncMillis = Date.now();
await upsertLastSuccessSyncTimeByVault(
this.db,
@ -346,6 +370,8 @@ export default class RemotelySavePlugin extends Plugin {
lastSuccessSyncMillis
);
this.updateLastSuccessSyncMsg(lastSuccessSyncMillis);
} else if (!everythingOk) {
this.updateLastSuccessSyncMsg(-2); // magic number
}
};
@ -384,6 +410,8 @@ export default class RemotelySavePlugin extends Plugin {
return;
}
const configSaver = async () => await this.saveSettings();
await syncer(
fsLocal,
fsRemote,
@ -395,6 +423,8 @@ export default class RemotelySavePlugin extends Plugin {
this.vaultRandomID,
this.app.vault.configDir,
this.settings,
this.manifest.version,
configSaver,
getProtectError,
markIsSyncingFunc,
notifyFunc,
@ -405,7 +435,7 @@ export default class RemotelySavePlugin extends Plugin {
);
fsEncrypt.closeResources();
profiler.clear();
(profiler as Profiler | undefined)?.clear();
this.syncEvent?.trigger("SYNC_DONE");
}
@ -532,7 +562,7 @@ export default class RemotelySavePlugin extends Plugin {
return;
}
let authRes = await sendAuthReqDropbox(
const authRes = await sendAuthReqDropbox(
this.settings.dropbox.clientID,
this.oauth2Info.verifier,
inputParams.code,
@ -617,7 +647,7 @@ export default class RemotelySavePlugin extends Plugin {
});
}
let rsp = await sendAuthReqOnedrive(
const rsp = await sendAuthReqOnedrive(
this.settings.onedrive.clientID,
this.settings.onedrive.authority,
inputParams.code,
@ -681,6 +711,77 @@ export default class RemotelySavePlugin extends Plugin {
}
);
this.registerObsidianProtocolHandler(
COMMAND_CALLBACK_PRO,
async (inputParams) => {
if (this.oauth2Info.helperModal !== undefined) {
const k = this.oauth2Info.helperModal.contentEl;
k.empty();
t("protocol_pro_connecting")
.split("\n")
.forEach((val) => {
k.createEl("p", {
text: val,
});
});
}
console.debug(inputParams);
const authRes = await sendAuthReqPro(
this.oauth2Info.verifier || "verifier",
inputParams.code,
async (e: any) => {
new Notice(t("protocol_pro_connect_fail"));
new Notice(`${e}`);
throw e;
}
);
console.debug(authRes);
const self = this;
await setConfigBySuccessfullAuthInplacePro(
this.settings.pro!,
authRes,
() => self.saveSettings()
);
await getAndSaveProFeatures(
this.settings.pro!,
this.manifest.version,
() => self.saveSettings()
);
await getAndSaveProEmail(
this.settings.pro!,
this.manifest.version,
() => self.saveSettings()
);
this.oauth2Info.verifier = ""; // reset it
this.oauth2Info.helperModal?.close(); // close it
this.oauth2Info.helperModal = undefined;
this.oauth2Info.authDiv?.toggleClass(
"pro-auth-button-hide",
this.settings.pro?.refreshToken !== ""
);
this.oauth2Info.authDiv = undefined;
this.oauth2Info.revokeAuthSetting?.setDesc(
t("protocol_pro_connect_succ_revoke", {
email: this.settings.pro?.email,
})
);
this.oauth2Info.revokeAuthSetting = undefined;
this.oauth2Info.revokeDiv?.toggleClass(
"pro-revoke-auth-button-hide",
this.settings.pro?.email === ""
);
this.oauth2Info.revokeDiv = undefined;
}
);
this.syncRibbon = this.addRibbonIcon(
iconNameSyncWait,
`${this.manifest.name}`,
@ -730,6 +831,22 @@ export default class RemotelySavePlugin extends Plugin {
},
});
this.addCommand({
id: "export-sync-plans-1-only-change",
name: t("command_exportsyncplans_1_only_change"),
icon: iconNameLogs,
callback: async () => {
await exportVaultSyncPlansToFiles(
this.db,
this.app.vault,
this.vaultRandomID,
1,
true
);
new Notice(t("settings_syncplans_notice"));
},
});
this.addCommand({
id: "export-sync-plans-1",
name: t("command_exportsyncplans_1"),
@ -739,7 +856,8 @@ export default class RemotelySavePlugin extends Plugin {
this.db,
this.app.vault,
this.vaultRandomID,
1
1,
false
);
new Notice(t("settings_syncplans_notice"));
},
@ -754,7 +872,8 @@ export default class RemotelySavePlugin extends Plugin {
this.db,
this.app.vault,
this.vaultRandomID,
5
5,
false
);
new Notice(t("settings_syncplans_notice"));
},
@ -769,7 +888,8 @@ export default class RemotelySavePlugin extends Plugin {
this.db,
this.app.vault,
this.vaultRandomID,
-1
-1,
false
);
new Notice(t("settings_syncplans_notice"));
},
@ -781,6 +901,8 @@ export default class RemotelySavePlugin extends Plugin {
// console.info("click", evt);
// });
this.enableCheckingFileStat();
if (!this.settings.agreeToUseSyncV3) {
const syncAlgoV3Modal = new SyncAlgoV3Modal(this.app, this);
syncAlgoV3Modal.open();
@ -838,6 +960,9 @@ export default class RemotelySavePlugin extends Plugin {
if (this.settings.onedrive.remoteBaseDir === undefined) {
this.settings.onedrive.remoteBaseDir = "";
}
if (this.settings.onedrive.emptyFile === undefined) {
this.settings.onedrive.emptyFile = "skip";
}
if (this.settings.webdav.manualRecursive === undefined) {
this.settings.webdav.manualRecursive = true;
}
@ -929,6 +1054,20 @@ export default class RemotelySavePlugin extends Plugin {
}
}
if (this.settings.profiler === undefined) {
this.settings.profiler = DEFAULT_PROFILER_CONFIG;
}
if (this.settings.profiler.enablePrinting === undefined) {
this.settings.profiler.enablePrinting = false;
}
if (this.settings.profiler.recordSize === undefined) {
this.settings.profiler.recordSize = false;
}
if (this.settings.googledrive === undefined) {
this.settings.googledrive = DEFAULT_GOOGLEDRIVE_CONFIG;
}
await this.saveSettings();
}
@ -952,7 +1091,7 @@ export default class RemotelySavePlugin extends Plugin {
}
async checkIfOauthExpires() {
let needSave: boolean = false;
let needSave = false;
const current = Date.now();
// fullfill old version settings
@ -997,6 +1136,10 @@ export default class RemotelySavePlugin extends Plugin {
needSave = true;
}
if (this.settings.pro === undefined) {
this.settings.pro = cloneDeep(DEFAULT_PRO_CONFIG);
}
// save back
if (needSave) {
await this.saveSettings();
@ -1196,6 +1339,41 @@ export default class RemotelySavePlugin extends Plugin {
});
}
/**
 * Register a "check file stat" entry in the file context menu.
 * Clicking it stats the file through the plugin's local fake fs
 * (so config-dir handling matches sync behavior) and shows the
 * result as a JSON Notice. Folders are not supported yet.
 */
enableCheckingFileStat() {
  this.app.workspace.onLayoutReady(() => {
    const t = (key: TransItemType, vars?: any) => this.i18n.t(key, vars);

    this.registerEvent(
      this.app.workspace.on("file-menu", (menu, file) => {
        // folder not supported yet
        if (file instanceof TFolder) {
          return;
        }
        menu.addItem((item) => {
          item
            .setTitle(t("menu_check_file_stat"))
            .setIcon("file-cog")
            .onClick(async () => {
              const localFs = new FakeFsLocal(
                this.app.vault,
                this.settings.syncConfigDir ?? false,
                this.app.vault.configDir,
                this.manifest.id,
                undefined,
                this.settings.deleteToWhere ?? "system"
              );
              const stat = await localFs.stat(file.path);
              // pretty-print the stat result; auto-dismiss after 10s
              new Notice(JSON.stringify(stat, null, 2), 10000);
            });
        });
      })
    );
  });
}
async saveAgreeToUseNewSyncAlgorithm() {
this.settings.agreeToUseSyncV3 = true;
await this.saveSettings();
@ -1226,6 +1404,11 @@ export default class RemotelySavePlugin extends Plugin {
lastSyncMsg = t("statusbar_syncing");
}
if (lastSuccessSyncMillis !== undefined && lastSuccessSyncMillis === -2) {
lastSyncMsg = t("statusbar_failed");
lastSyncLabelMsg = t("statusbar_failed");
}
if (lastSuccessSyncMillis !== undefined && lastSuccessSyncMillis > 0) {
const deltaTime = Date.now() - lastSuccessSyncMillis;
@ -1258,7 +1441,7 @@ export default class RemotelySavePlugin extends Plugin {
timeText = t("statusbar_now");
}
let dateText = new Date(lastSuccessSyncMillis).toLocaleTimeString(
const dateText = new Date(lastSuccessSyncMillis).toLocaleTimeString(
navigator.language,
{
weekday: "long",

View File

@ -1,9 +1,9 @@
import { Platform, Vault } from "obsidian";
import * as path from "path";
import type { Vault } from "obsidian";
import { base32, base64url } from "rfc4648";
import XRegExp from "xregexp";
import emojiRegex from "emoji-regex";
import { base32 } from "rfc4648";
import XRegExp from "xregexp";
declare global {
interface Window {
@ -18,11 +18,7 @@ declare global {
* @param underscore
* @returns
*/
export const isHiddenPath = (
item: string,
dot: boolean = true,
underscore: boolean = true
) => {
export const isHiddenPath = (item: string, dot = true, underscore = true) => {
if (!(dot || underscore)) {
throw Error("parameter error for isHiddenPath");
}
@ -50,7 +46,7 @@ export const isHiddenPath = (
* @param x string
* @returns string[] might be empty
*/
export const getFolderLevels = (x: string, addEndingSlash: boolean = false) => {
export const getFolderLevels = (x: string, addEndingSlash = false) => {
const res: string[] = [];
if (x === "" || x === "/") {
@ -58,7 +54,7 @@ export const getFolderLevels = (x: string, addEndingSlash: boolean = false) => {
}
const y1 = x.split("/");
let i = 0;
const i = 0;
for (let index = 0; index + 1 < y1.length; index++) {
let k = y1.slice(0, index + 1).join("/");
if (k === "" || k === "/") {
@ -119,7 +115,7 @@ export const base64ToArrayBuffer = (b64text: string) => {
};
/**
 * Deep-copy an ArrayBuffer.
 * @param src buffer to copy
 * @returns a new ArrayBuffer with the same bytes; mutating either side
 *          afterwards does not affect the other
 */
export const copyArrayBuffer = (src: ArrayBuffer) => {
  const dst = new ArrayBuffer(src.byteLength);
  new Uint8Array(dst).set(new Uint8Array(src));
  return dst;
};
@ -134,18 +130,14 @@ export const hexStringToTypedArray = (hex: string) => {
if (f === null) {
throw Error(`input ${hex} is not hex, no way to transform`);
}
return new Uint8Array(
f.map(function (h) {
return parseInt(h, 16);
})
);
return new Uint8Array(f.map((h) => Number.parseInt(h, 16)));
};
/**
 * Re-encode a base64 string as base32 (RFC 4648).
 * @param a base64-encoded input
 * @returns base32 representation of the same bytes
 */
export const base64ToBase32 = (a: string) => {
  const rawBytes = Buffer.from(a, "base64");
  return base32.stringify(rawBytes);
};
export const base64ToBase64url = (a: string, pad: boolean = false) => {
export const base64ToBase64url = (a: string, pad = false) => {
let b = a.replace(/\+/g, "-").replace(/\//g, "_");
if (!pad) {
b = b.replace(/=/g, "");
@ -190,7 +182,7 @@ export const hasEmojiInText = (a: string) => {
* @param toLower
* @returns
*/
export const headersToRecord = (h: Headers, toLower: boolean = true) => {
export const headersToRecord = (h: Headers, toLower = true) => {
const res: Record<string, string> = {};
h.forEach((v, k) => {
if (toLower) {
@ -240,11 +232,11 @@ export const getParentFolder = (a: string) => {
* @param delimiter
* @returns
*/
export const setToString = (a: Set<string>, delimiter: string = ",") => {
export const setToString = (a: Set<string>, delimiter = ",") => {
return [...a].join(delimiter);
};
export const extractSvgSub = (x: string, subEl: string = "rect") => {
export const extractSvgSub = (x: string, subEl = "rect") => {
const parser = new window.DOMParser();
const dom = parser.parseFromString(x, "image/svg+xml");
const svg = dom.querySelector("svg")!;
@ -261,10 +253,10 @@ export const extractSvgSub = (x: string, subEl: string = "rect") => {
/**
 * Random integer in [min, max], both bounds inclusive, seeded from
 * window.crypto (cryptographically strong randomness).
 * @param min lower bound (ceiled to an integer)
 * @param max upper bound (floored to an integer)
 * @returns an integer between the adjusted bounds, inclusive
 */
export const getRandomIntInclusive = (min: number, max: number) => {
  const randomBuffer = new Uint32Array(1);
  window.crypto.getRandomValues(randomBuffer);
  // map the raw 32-bit value into [0, 1)
  const fraction = randomBuffer[0] / (0xffffffff + 1);
  const lo = Math.ceil(min);
  const hi = Math.floor(max);
  return Math.floor(fraction * (hi - lo + 1)) + lo;
};
/**
@ -349,11 +341,17 @@ export const checkHasSpecialCharForDir = (x: string) => {
return /[?/\\]/.test(x);
};
export const unixTimeToStr = (x: number | undefined | null) => {
export const unixTimeToStr = (x: number | undefined | null, hasMs = false) => {
if (x === undefined || x === null || Number.isNaN(x)) {
return undefined;
}
return window.moment(x).format() as string;
if (hasMs) {
// 1716712162574 => '2024-05-26T16:29:22.574+08:00'
return window.moment(x).toISOString(true);
} else {
// 1716712162574 => '2024-05-26T16:29:22+08:00'
return window.moment(x).format() as string;
}
};
/**
@ -397,9 +395,8 @@ export const toText = (x: any) => {
if (
x instanceof Error ||
(x &&
x.stack &&
x.message &&
(x?.stack &&
x?.message &&
typeof x.stack === "string" &&
typeof x.message === "string")
) {
@ -425,7 +422,7 @@ export const toText = (x: any) => {
export const statFix = async (vault: Vault, path: string) => {
const s = await vault.adapter.stat(path);
if (s === undefined || s === null) {
return s;
throw Error(`${path} doesn't exist cannot run stat`);
}
if (s.ctime === undefined || s.ctime === null || Number.isNaN(s.ctime)) {
s.ctime = undefined as any; // force assignment
@ -446,7 +443,7 @@ export const isSpecialFolderNameToSkip = (
x: string,
more: string[] | undefined
) => {
let specialFolders = [
const specialFolders = [
".git",
".github",
".gitlab",
@ -595,6 +592,7 @@ export const changeMobileStatusBar = (
if (oldAppContainerObserver !== undefined) {
console.debug(`disconnect oldAppContainerObserver`);
oldAppContainerObserver.disconnect();
// biome-ignore lint/style/noParameterAssign: we want gc
oldAppContainerObserver = undefined;
}
statusbar.style.removeProperty("display");
@ -631,7 +629,6 @@ export const fixEntityListCasesInplace = (entities: { keyRaw: string }[]) => {
caseMapping[newKeyRaw.toLocaleLowerCase()] = newKeyRaw;
e.keyRaw = newKeyRaw;
// console.log(JSON.stringify(caseMapping,null,2));
continue;
} else {
throw Error(`${parentFolder} doesn't have cases record??`);
}
@ -642,7 +639,6 @@ export const fixEntityListCasesInplace = (entities: { keyRaw: string }[]) => {
.slice(-1)
.join("/")}`;
e.keyRaw = newKeyRaw;
continue;
} else {
throw Error(`${parentFolder} doesn't have cases record??`);
}
@ -651,3 +647,72 @@ export const fixEntityListCasesInplace = (entities: { keyRaw: string }[]) => {
return entities;
};
/**
* https://stackoverflow.com/questions/1248302/how-to-get-the-size-of-a-javascript-object
* @param object
* @returns bytes
*/
/**
 * Rough, recursive estimate of the in-memory size of a JS value, in bytes.
 * https://stackoverflow.com/questions/1248302/how-to-get-the-size-of-a-javascript-object
 * Booleans count 4 bytes, numbers 8, strings 2 per UTF-16 code unit;
 * objects contribute the sizes of their own enumerable property values.
 * Already-visited objects are skipped, so circular structures terminate.
 * @param object any value
 * @returns estimated size in bytes
 */
export const roughSizeOfObject = (object: any) => {
  // Set gives O(1) visited lookup (vs O(n) Array.includes) with the same
  // SameValueZero reference semantics, avoiding accidental O(n^2) scans.
  const seen = new Set<any>();
  const stack = [object];
  let bytes = 0;

  while (stack.length) {
    const value = stack.pop();

    switch (typeof value) {
      case "boolean":
        bytes += 4;
        break;
      case "string":
        bytes += value.length * 2;
        break;
      case "number":
        bytes += 8;
        break;
      case "object":
        if (!seen.has(value)) {
          seen.add(value);
          for (const prop in value) {
            // call via the prototype so objects created with
            // Object.create(null) (no hasOwnProperty method) still work
            if (Object.prototype.hasOwnProperty.call(value, prop)) {
              stack.push(value[prop]);
            }
          }
        }
        break;
    }
  }
  return bytes;
};
/**
 * Split a file of totalSize bytes into contiguous chunk ranges.
 * Each range is an inclusive byte span {start, end}; the last chunk may be
 * shorter than chunkSize. A zero-byte file yields no ranges.
 * @param totalSize total number of bytes (>= 0)
 * @param chunkSize maximum bytes per chunk (> 0)
 * @returns list of inclusive {start, end} ranges covering [0, totalSize)
 */
export const splitFileSizeToChunkRanges = (
  totalSize: number,
  chunkSize: number
) => {
  if (totalSize < 0) {
    throw Error(`totalSize should not be negative`);
  }
  if (chunkSize <= 0) {
    throw Error(`chunkSize should not be negative or zero`);
  }

  // walk the start offsets directly; this naturally produces [] for an
  // empty file and a single range when totalSize <= chunkSize
  const ranges: { start: number; end: number }[] = [];
  for (let start = 0; start < totalSize; start += chunkSize) {
    ranges.push({
      start: start,
      end: Math.min(start + chunkSize, totalSize) - 1,
    });
  }
  return ranges;
};

View File

@ -1,10 +1,10 @@
import type { Vault, Stat, ListedFiles } from "obsidian";
import type { Entity, MixedEntity } from "./baseTypes";
import type { ListedFiles, Vault } from "obsidian";
import type { Entity } from "./baseTypes";
import { Queue } from "@fyears/tsqueue";
import chunk from "lodash/chunk";
import flatten from "lodash/flatten";
import { statFix, isSpecialFolderNameToSkip } from "./misc";
import { isSpecialFolderNameToSkip, statFix } from "./misc";
const isPluginDirItself = (x: string, pluginId: string) => {
return (

View File

@ -1,29 +1,43 @@
import { SUPPORTED_SERVICES_TYPE } from "./baseTypes";
import { InternalDBs, insertProfilerResultByVault } from "./localdb";
import { unixTimeToStr } from "./misc";
import type { ProfilerConfig, SUPPORTED_SERVICES_TYPE } from "./baseTypes";
import { type InternalDBs, insertProfilerResultByVault } from "./localdb";
import { roughSizeOfObject, unixTimeToStr } from "./misc";
interface BreakPoint {
label: string;
fakeTimeMilli: number; // it's NOT a unix timestamp
indent: number;
size?: number;
}
// Default profiler settings: both console printing of breakpoints and
// recording of object sizes are opt-in, so profiling is effectively a
// no-op unless the user enables it explicitly.
export const DEFAULT_PROFILER_CONFIG: ProfilerConfig = {
  enablePrinting: false,
  recordSize: false,
};
export class Profiler {
startTime: number;
breakPoints: BreakPoint[];
indent: number;
constructor(label?: string) {
enablePrinting: boolean;
recordSize: boolean;
constructor(label?: string, enablePrinting?: boolean, recordSize?: boolean) {
this.breakPoints = [];
this.indent = 0;
this.startTime = 0;
this.enablePrinting = enablePrinting ?? false;
this.recordSize = recordSize ?? false;
if (label !== undefined) {
this.startTime = Date.now();
this.breakPoints.push({
const p = {
label: label,
fakeTimeMilli: performance.now(),
indent: this.indent,
});
};
this.breakPoints.push(p);
if (this.enablePrinting) {
console.debug(this.toString(-1));
}
}
}
@ -31,11 +45,36 @@ export class Profiler {
if (this.breakPoints.length === 0) {
this.startTime = Date.now();
}
this.breakPoints.push({
const p = {
label: label,
fakeTimeMilli: performance.now(),
indent: this.indent,
});
};
this.breakPoints.push(p);
if (this.enablePrinting) {
console.debug(this.toString(-1));
}
return this;
}
/**
 * Record a breakpoint that also carries the rough byte size of `obj`.
 * No-op when `recordSize` is disabled.
 * @param label name of the breakpoint
 * @param obj value whose approximate size is recorded via roughSizeOfObject
 * @returns this, for chaining — including when recordSize is off
 *          (previously returned undefined there, breaking chained calls)
 */
insertSize(label: string, obj: any) {
  if (!this.recordSize) {
    // keep the chainable contract consistent with insert()
    return this;
  }
  if (this.breakPoints.length === 0) {
    this.startTime = Date.now();
  }
  const p = {
    label: label,
    fakeTimeMilli: performance.now(), // monotonic clock, NOT a unix timestamp
    indent: this.indent,
    size: roughSizeOfObject(obj),
  };
  this.breakPoints.push(p);
  if (this.enablePrinting) {
    // print only the newly appended breakpoint
    console.debug(this.toString(-1));
  }
  return this;
}
@ -57,7 +96,31 @@ export class Profiler {
return this;
}
toString() {
toString(idx?: number) {
if (idx !== undefined) {
let i = idx;
if (idx < 0) {
i = this.breakPoints.length + idx;
}
const label = this.breakPoints?.[i]["label"];
const indent = this.breakPoints?.[i]["indent"];
let millsec = 0;
if (i >= 1) {
millsec =
Math.round(
(this.breakPoints?.[i]["fakeTimeMilli"] -
this.breakPoints?.[i - 1]["fakeTimeMilli"]) *
10
) / 10.0;
}
let res = `${" ".repeat(indent)}[${label}]: ${millsec}ms`;
if (this.breakPoints[i].hasOwnProperty("size")) {
const size = this.breakPoints[i].size as number;
res += `, size=${size}`;
}
return res;
}
if (this.breakPoints.length === 0) {
return "nothing in profiler";
}
@ -67,15 +130,7 @@ export class Profiler {
if (i === 0) {
res += `\n[${this.breakPoints[i]["label"]}]: start`;
} else {
const label = this.breakPoints[i]["label"];
const indent = this.breakPoints[i]["indent"];
const millsec =
Math.round(
(this.breakPoints[i]["fakeTimeMilli"] -
this.breakPoints[i - 1]["fakeTimeMilli"]) *
10
) / 10.0;
res += `\n${" ".repeat(indent)}[${label}]: ${millsec}ms`;
res += `\n${this.toString(i)}`;
}
}

View File

@ -1,34 +1,47 @@
import { Eye, EyeOff, createElement } from "lucide";
import {
App,
type App,
Modal,
Notice,
Platform,
PluginSettingTab,
Setting,
Platform,
requireApiVersion,
requestUrl,
} from "obsidian";
import type { TextComponent } from "obsidian";
import { createElement, Eye, EyeOff } from "lucide";
import {
API_VER_ENSURE_REQURL_OK,
API_VER_REQURL,
import type {
CipherMethodType,
ConflictActionType,
DEFAULT_DEBUG_FOLDER,
EmptyFolderCleanType,
QRExportType,
SUPPORTED_SERVICES_TYPE,
SUPPORTED_SERVICES_TYPE_WITH_REMOTE_BASE_DIR,
SyncDirectionType,
VALID_REQURL,
WebdavAuthType,
WebdavDepthType,
CipherMethodType,
QRExportType,
} from "./baseTypes";
import cloneDeep from "lodash/cloneDeep";
import { generateGoogleDriveSettingsPart } from "../pro/src/settingsGoogleDrive";
import { generateProSettingsPart } from "../pro/src/settingsPro";
import { API_VER_ENSURE_REQURL_OK, VALID_REQURL } from "./baseTypesObs";
import { messyConfigToNormal } from "./configPersist";
import {
exportVaultProfilerResultsToFiles,
exportVaultSyncPlansToFiles,
} from "./debugMode";
import {
DEFAULT_DROPBOX_CONFIG,
getAuthUrlAndVerifier as getAuthUrlAndVerifierDropbox,
sendAuthReq as sendAuthReqDropbox,
setConfigBySuccessfullAuthInplace,
} from "./fsDropbox";
import { getClient } from "./fsGetter";
import {
DEFAULT_ONEDRIVE_CONFIG,
getAuthUrlAndVerifier as getAuthUrlAndVerifierOnedrive,
} from "./fsOnedrive";
import { simpleTransRemotePrefix } from "./fsS3";
import type { TransItemType } from "./i18n";
import {
exportQrCodeUri,
importQrCodeUri,
@ -41,27 +54,12 @@ import {
upsertLastSuccessSyncTimeByVault,
} from "./localdb";
import type RemotelySavePlugin from "./main"; // unavoidable
import { FakeFs } from "./fsAll";
import {
DEFAULT_DROPBOX_CONFIG,
getAuthUrlAndVerifier as getAuthUrlAndVerifierDropbox,
sendAuthReq as sendAuthReqDropbox,
setConfigBySuccessfullAuthInplace,
} from "./fsDropbox";
import {
DEFAULT_ONEDRIVE_CONFIG,
getAuthUrlAndVerifier as getAuthUrlAndVerifierOnedrive,
} from "./fsOnedrive";
import { messyConfigToNormal } from "./configPersist";
import type { TransItemType } from "./i18n";
import {
changeMobileStatusBar,
checkHasSpecialCharForDir,
stringToFragment,
} from "./misc";
import { simpleTransRemotePrefix } from "./fsS3";
import cloneDeep from "lodash/cloneDeep";
import { getClient } from "./fsGetter";
import { DEFAULT_PROFILER_CONFIG } from "./profiler";
class PasswordModal extends Modal {
plugin: RemotelySavePlugin;
@ -73,7 +71,7 @@ class PasswordModal extends Modal {
}
onOpen() {
let { contentEl } = this;
const { contentEl } = this;
const t = (x: TransItemType, vars?: any) => {
return this.plugin.i18n.t(x, vars);
@ -128,7 +126,7 @@ class PasswordModal extends Modal {
}
onClose() {
let { contentEl } = this;
const { contentEl } = this;
contentEl.empty();
}
}
@ -141,7 +139,7 @@ class EncryptionMethodModal extends Modal {
}
onOpen() {
let { contentEl } = this;
const { contentEl } = this;
const t = (x: TransItemType, vars?: any) => {
return this.plugin.i18n.t(x, vars);
@ -167,12 +165,12 @@ class EncryptionMethodModal extends Modal {
}
onClose() {
let { contentEl } = this;
const { contentEl } = this;
contentEl.empty();
}
}
class ChangeRemoteBaseDirModal extends Modal {
export class ChangeRemoteBaseDirModal extends Modal {
readonly plugin: RemotelySavePlugin;
readonly newRemoteBaseDir: string;
readonly service: SUPPORTED_SERVICES_TYPE_WITH_REMOTE_BASE_DIR;
@ -189,7 +187,7 @@ class ChangeRemoteBaseDirModal extends Modal {
}
onOpen() {
let { contentEl } = this;
const { contentEl } = this;
const t = (x: TransItemType, vars?: any) => {
return this.plugin.i18n.t(x, vars);
@ -261,7 +259,7 @@ class ChangeRemoteBaseDirModal extends Modal {
}
onClose() {
let { contentEl } = this;
const { contentEl } = this;
contentEl.empty();
}
}
@ -280,7 +278,7 @@ class ChangeRemotePrefixModal extends Modal {
}
onOpen() {
let { contentEl } = this;
const { contentEl } = this;
const t = (x: TransItemType, vars?: any) => {
return this.plugin.i18n.t(x, vars);
@ -343,7 +341,7 @@ class ChangeRemotePrefixModal extends Modal {
}
onClose() {
let { contentEl } = this;
const { contentEl } = this;
contentEl.empty();
}
}
@ -368,7 +366,7 @@ class DropboxAuthModal extends Modal {
}
async onOpen() {
let { contentEl } = this;
const { contentEl } = this;
const t = (x: TransItemType, vars?: any) => {
return this.plugin.i18n.t(x, vars);
@ -506,7 +504,7 @@ class DropboxAuthModal extends Modal {
}
onClose() {
let { contentEl } = this;
const { contentEl } = this;
contentEl.empty();
}
}
@ -531,7 +529,7 @@ export class OnedriveAuthModal extends Modal {
}
async onOpen() {
let { contentEl } = this;
const { contentEl } = this;
const { authUrl, verifier } = await getAuthUrlAndVerifierOnedrive(
this.plugin.settings.onedrive.clientID,
@ -580,7 +578,7 @@ export class OnedriveAuthModal extends Modal {
}
onClose() {
let { contentEl } = this;
const { contentEl } = this;
contentEl.empty();
}
}
@ -602,7 +600,7 @@ export class OnedriveRevokeAuthModal extends Modal {
}
async onOpen() {
let { contentEl } = this;
const { contentEl } = this;
const t = (x: TransItemType, vars?: any) => {
return this.plugin.i18n.t(x, vars);
};
@ -650,7 +648,7 @@ export class OnedriveRevokeAuthModal extends Modal {
}
onClose() {
let { contentEl } = this;
const { contentEl } = this;
contentEl.empty();
}
}
@ -669,7 +667,7 @@ class SyncConfigDirModal extends Modal {
}
async onOpen() {
let { contentEl } = this;
const { contentEl } = this;
const t = (x: TransItemType, vars?: any) => {
return this.plugin.i18n.t(x, vars);
@ -703,7 +701,7 @@ class SyncConfigDirModal extends Modal {
}
onClose() {
let { contentEl } = this;
const { contentEl } = this;
contentEl.empty();
}
}
@ -718,7 +716,7 @@ class ExportSettingsQrCodeModal extends Modal {
}
async onOpen() {
let { contentEl } = this;
const { contentEl } = this;
const t = (x: TransItemType, vars?: any) => {
return this.plugin.i18n.t(x, vars);
@ -767,7 +765,7 @@ class ExportSettingsQrCodeModal extends Modal {
}
onClose() {
let { contentEl } = this;
const { contentEl } = this;
contentEl.empty();
}
}
@ -807,7 +805,7 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
}
display(): void {
let { containerEl } = this;
const { containerEl } = this;
containerEl.style.setProperty("overflow-wrap", "break-word");
containerEl.empty();
@ -1004,7 +1002,7 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
dropdown
.setValue(`${this.plugin.settings.s3.partsConcurrency}`)
.onChange(async (val) => {
const realVal = parseInt(val);
const realVal = Number.parseInt(val);
this.plugin.settings.s3.partsConcurrency = realVal;
await this.plugin.saveSettings();
});
@ -1083,7 +1081,11 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
dropdown
.setValue(
`${this.plugin.settings.s3.generateFolderObject ? "generate" : "notgenerate"}`
`${
this.plugin.settings.s3.generateFolderObject
? "generate"
: "notgenerate"
}`
)
.onChange(async (val) => {
if (val === "generate") {
@ -1171,7 +1173,6 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
button.setButtonText(t("settings_dropbox_revoke_button"));
button.onClick(async () => {
try {
const self = this;
const client = getClient(
this.plugin.settings,
this.app.vault.getName(),
@ -1282,7 +1283,6 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
button.setButtonText(t("settings_checkonnectivity_button"));
button.onClick(async () => {
new Notice(t("settings_checkonnectivity_checking"));
const self = this;
const client = getClient(
this.plugin.settings,
this.app.vault.getName(),
@ -1420,6 +1420,20 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
});
});
new Setting(onedriveDiv)
.setName(t("settings_onedrive_emptyfile"))
.setDesc(t("settings_onedrive_emptyfile_desc"))
.addDropdown(async (dropdown) => {
dropdown
.addOption("skip", t("settings_onedrive_emptyfile_skip"))
.addOption("error", t("settings_onedrive_emptyfile_error"))
.setValue(this.plugin.settings.onedrive.emptyFile)
.onChange(async (val) => {
this.plugin.settings.onedrive.emptyFile = val as any;
await this.plugin.saveSettings();
});
});
new Setting(onedriveDiv)
.setName(t("settings_checkonnectivity"))
.setDesc(t("settings_checkonnectivity_desc"))
@ -1427,7 +1441,6 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
button.setButtonText(t("settings_checkonnectivity_button"));
button.onClick(async () => {
new Notice(t("settings_checkonnectivity_checking"));
const self = this;
const client = getClient(
this.plugin.settings,
this.app.vault.getName(),
@ -1632,7 +1645,6 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
button.setButtonText(t("settings_checkonnectivity_button"));
button.onClick(async () => {
new Notice(t("settings_checkonnectivity_checking"));
const self = this;
const client = getClient(
this.plugin.settings,
this.app.vault.getName(),
@ -1655,6 +1667,147 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
});
});
//////////////////////////////////////////////////
// below for webdis
//////////////////////////////////////////////////
const webdisDiv = containerEl.createEl("div", { cls: "webdis-hide" });
webdisDiv.toggleClass(
"webdis-hide",
this.plugin.settings.serviceType !== "webdis"
);
webdisDiv.createEl("h2", { text: t("settings_webdis") });
const webdisLongDescDiv = webdisDiv.createEl("div", {
cls: "settings-long-desc",
});
for (const c of [
t("settings_webdis_disclaimer1"),
t("settings_webdis_disclaimer2"),
]) {
webdisLongDescDiv.createEl("p", {
text: c,
cls: "webdis-disclaimer",
});
}
webdisLongDescDiv.createEl("p", {
text: t("settings_webdis_folder", {
remoteBaseDir:
this.plugin.settings.webdis.remoteBaseDir || this.app.vault.getName(),
}),
});
new Setting(webdisDiv)
.setName(t("settings_webdis_addr"))
.setDesc(t("settings_webdis_addr_desc"))
.addText((text) =>
text
.setPlaceholder("https://")
.setValue(this.plugin.settings.webdis.address)
.onChange(async (value) => {
this.plugin.settings.webdis.address = value.trim();
// normally saved
await this.plugin.saveSettings();
})
);
new Setting(webdisDiv)
.setName(t("settings_webdis_user"))
.setDesc(t("settings_webdis_user_desc"))
.addText((text) => {
wrapTextWithPasswordHide(text);
text
.setPlaceholder("")
.setValue(this.plugin.settings.webdis.username ?? "")
.onChange(async (value) => {
this.plugin.settings.webdis.username = (value ?? "").trim();
await this.plugin.saveSettings();
});
});
new Setting(webdisDiv)
.setName(t("settings_webdis_password"))
.setDesc(t("settings_webdis_password_desc"))
.addText((text) => {
wrapTextWithPasswordHide(text);
text
.setPlaceholder("")
.setValue(this.plugin.settings.webdis.password ?? "")
.onChange(async (value) => {
this.plugin.settings.webdis.password = (value ?? "").trim();
await this.plugin.saveSettings();
});
});
let newWebdisRemoteBaseDir =
this.plugin.settings.webdis.remoteBaseDir || "";
new Setting(webdisDiv)
.setName(t("settings_remotebasedir"))
.setDesc(t("settings_remotebasedir_desc"))
.addText((text) =>
text
.setPlaceholder(this.app.vault.getName())
.setValue(newWebdisRemoteBaseDir)
.onChange((value) => {
newWebdisRemoteBaseDir = value.trim();
})
)
.addButton((button) => {
button.setButtonText(t("confirm"));
button.onClick(() => {
new ChangeRemoteBaseDirModal(
this.app,
this.plugin,
newWebdisRemoteBaseDir,
"webdis"
).open();
});
});
new Setting(webdisDiv)
.setName(t("settings_checkonnectivity"))
.setDesc(t("settings_checkonnectivity_desc"))
.addButton(async (button) => {
button.setButtonText(t("settings_checkonnectivity_button"));
button.onClick(async () => {
new Notice(t("settings_checkonnectivity_checking"));
const client = getClient(
this.plugin.settings,
this.app.vault.getName(),
() => this.plugin.saveSettings()
);
const errors = { msg: "" };
const res = await client.checkConnect((err: any) => {
errors.msg = `${err}`;
});
if (res) {
new Notice(t("settings_webdis_connect_succ"));
} else {
new Notice(t("settings_webdis_connect_fail"));
new Notice(errors.msg);
}
});
});
//////////////////////////////////////////////////
// below for googledrive
//////////////////////////////////////////////////
const {
googleDriveDiv,
googleDriveAllowedToUsedDiv,
googleDriveNotShowUpHintSetting,
} = generateGoogleDriveSettingsPart(
containerEl,
t,
this.app,
this.plugin,
() => this.plugin.saveSettings()
);
//////////////////////////////////////////////////
// below for general chooser (part 2/2)
//////////////////////////////////////////////////
@ -1669,6 +1822,12 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
dropdown.addOption("dropbox", t("settings_chooseservice_dropbox"));
dropdown.addOption("webdav", t("settings_chooseservice_webdav"));
dropdown.addOption("onedrive", t("settings_chooseservice_onedrive"));
dropdown.addOption("webdis", t("settings_chooseservice_webdis"));
dropdown.addOption(
"googledrive",
t("settings_chooseservice_googledrive")
);
dropdown
.setValue(this.plugin.settings.serviceType)
.onChange(async (val) => {
@ -1689,6 +1848,14 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
"webdav-hide",
this.plugin.settings.serviceType !== "webdav"
);
webdisDiv.toggleClass(
"webdis-hide",
this.plugin.settings.serviceType !== "webdis"
);
googleDriveDiv.toggleClass(
"googledrive-hide",
this.plugin.settings.serviceType !== "googledrive"
);
await this.plugin.saveSettings();
});
});
@ -1750,7 +1917,7 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
dropdown
.setValue(`${this.plugin.settings.autoRunEveryMilliseconds}`)
.onChange(async (val: string) => {
const realVal = parseInt(val);
const realVal = Number.parseInt(val);
this.plugin.settings.autoRunEveryMilliseconds = realVal;
await this.plugin.saveSettings();
if (
@ -1795,7 +1962,7 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
dropdown
.setValue(`${this.plugin.settings.initRunAfterMilliseconds}`)
.onChange(async (val: string) => {
const realVal = parseInt(val);
const realVal = Number.parseInt(val);
this.plugin.settings.initRunAfterMilliseconds = realVal;
await this.plugin.saveSettings();
});
@ -1815,7 +1982,8 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
dropdown
.setValue(`${syncOnSaveEnabled ? "1000" : "-1"}`)
.onChange(async (val: string) => {
this.plugin.settings.syncOnSaveAfterMilliseconds = parseInt(val);
this.plugin.settings.syncOnSaveAfterMilliseconds =
Number.parseInt(val);
await this.plugin.saveSettings();
this.plugin.toggleSyncOnSaveIfSet();
});
@ -1827,14 +1995,14 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
.addDropdown((dropdown) => {
dropdown.addOption("-1", t("settings_skiplargefiles_notset"));
const mbs = [1, 5, 10, 50, 100, 500, 1000];
const mbs = [1, 5, 10, 20, 50, 100, 200, 500, 1000];
for (const mb of mbs) {
dropdown.addOption(`${mb * 1000 * 1000}`, `${mb} MB`);
}
dropdown
.setValue(`${this.plugin.settings.skipSizeLargerThan}`)
.onChange(async (val) => {
this.plugin.settings.skipSizeLargerThan = parseInt(val);
this.plugin.settings.skipSizeLargerThan = Number.parseInt(val);
await this.plugin.saveSettings();
});
});
@ -1916,7 +2084,7 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
dropdown
.setValue(`${this.plugin.settings.concurrency}`)
.onChange(async (val) => {
const realVal = parseInt(val);
const realVal = Number.parseInt(val);
this.plugin.settings.concurrency = realVal;
await this.plugin.saveSettings();
});
@ -1988,25 +2156,44 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
});
});
new Setting(advDiv)
let conflictActionSettingOrigDesc = t("settings_conflictaction_desc");
if (
(this.plugin.settings.conflictAction ?? "keep_newer") === "smart_conflict"
) {
conflictActionSettingOrigDesc += t(
"settings_conflictaction_smart_conflict_desc"
);
}
const conflictActionSetting = new Setting(advDiv)
.setName(t("settings_conflictaction"))
.setDesc(t("settings_conflictaction_desc"))
.addDropdown((dropdown) => {
dropdown.addOption(
"keep_newer",
t("settings_conflictaction_keep_newer")
);
dropdown.addOption(
"keep_larger",
t("settings_conflictaction_keep_larger")
);
dropdown
.setValue(this.plugin.settings.conflictAction ?? "keep_newer")
.onChange(async (val) => {
this.plugin.settings.conflictAction = val as ConflictActionType;
await this.plugin.saveSettings();
});
});
.setDesc(stringToFragment(conflictActionSettingOrigDesc));
conflictActionSetting.addDropdown((dropdown) => {
dropdown
.addOption("keep_newer", t("settings_conflictaction_keep_newer"))
.addOption("keep_larger", t("settings_conflictaction_keep_larger"))
.addOption(
"smart_conflict",
t("settings_conflictaction_smart_conflict")
)
.setValue(this.plugin.settings.conflictAction ?? "keep_newer")
.onChange(async (val) => {
this.plugin.settings.conflictAction = val as ConflictActionType;
await this.plugin.saveSettings();
conflictActionSettingOrigDesc = t("settings_conflictaction_desc");
if (
(this.plugin.settings.conflictAction ?? "keep_newer") ===
"smart_conflict"
) {
conflictActionSettingOrigDesc += t(
"settings_conflictaction_smart_conflict_desc"
);
}
conflictActionSetting.setDesc(
stringToFragment(conflictActionSettingOrigDesc)
);
});
});
new Setting(advDiv)
.setName(t("settings_cleanemptyfolder"))
@ -2026,28 +2213,77 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
});
});
new Setting(advDiv)
const percentage1 = new Setting(advDiv)
.setName(t("settings_protectmodifypercentage"))
.setDesc(t("settings_protectmodifypercentage_desc"))
.addDropdown((dropdown) => {
for (const i of Array.from({ length: 11 }, (x, i) => i * 10)) {
let desc = `${i}`;
if (i === 0) {
desc = t("settings_protectmodifypercentage_000_desc");
} else if (i === 50) {
desc = t("settings_protectmodifypercentage_050_desc");
} else if (i === 100) {
desc = t("settings_protectmodifypercentage_100_desc");
}
dropdown.addOption(`${i}`, desc);
}
dropdown
.setValue(`${this.plugin.settings.protectModifyPercentage ?? 50}`)
.onChange(async (val) => {
this.plugin.settings.protectModifyPercentage = parseInt(val);
.setDesc(t("settings_protectmodifypercentage_desc"));
const percentage2 = new Setting(advDiv)
.setName(t("settings_protectmodifypercentage_customfield"))
.setDesc(t("settings_protectmodifypercentage_customfield_desc"));
if ((this.plugin.settings.protectModifyPercentage ?? 50) % 10 === 0) {
percentage2.settingEl.addClass("settings-percentage-custom-hide");
}
let percentage2Text: TextComponent | undefined = undefined;
percentage2.addText((text) => {
text.inputEl.type = "number";
percentage2Text = text;
text
.setPlaceholder("0 ~ 100")
.setValue(`${this.plugin.settings.protectModifyPercentage ?? 50}`)
.onChange(async (val) => {
let k = Number.parseFloat(val.trim());
if (Number.isNaN(k)) {
// do nothing!
} else {
if (k < 0) {
k = 0;
} else if (k > 100) {
k = 100;
}
this.plugin.settings.protectModifyPercentage = k;
await this.plugin.saveSettings();
});
}
});
});
percentage1.addDropdown((dropdown) => {
for (const i of Array.from({ length: 11 }, (x, i) => i * 10)) {
let desc = `${i}`;
if (i === 0) {
desc = t("settings_protectmodifypercentage_000_desc");
} else if (i === 50) {
desc = t("settings_protectmodifypercentage_050_desc");
} else if (i === 100) {
desc = t("settings_protectmodifypercentage_100_desc");
}
dropdown.addOption(`${i}`, desc);
}
dropdown.addOption(
"custom",
t("settings_protectmodifypercentage_custom_desc")
);
const p = this.plugin.settings.protectModifyPercentage ?? 50;
let initVal = "custom";
if (p % 10 === 0) {
initVal = `${p}`;
} else {
// show custom
percentage2.settingEl.removeClass("settings-percentage-custom");
}
dropdown.setValue(initVal).onChange(async (val) => {
const k = Number.parseInt(val);
if (val === "custom" || Number.isNaN(k)) {
// do nothing until user changes something in custom field
percentage2.settingEl.removeClass("settings-percentage-custom-hide");
} else {
this.plugin.settings.protectModifyPercentage = k;
percentage2.settingEl.addClass("settings-percentage-custom-hide");
percentage2Text?.setValue(`${k}`);
await this.plugin.saveSettings();
}
});
});
new Setting(advDiv)
.setName(t("setting_syncdirection"))
@ -2119,23 +2355,28 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
importExportDiv.createEl("h2", {
text: t("settings_importexport"),
});
if (Platform.isMobile) {
importExportDiv.addClass("setting-need-wrapping-mobile");
}
new Setting(importExportDiv)
const importExportDivSetting1 = new Setting(importExportDiv)
.setName(t("settings_export"))
.setDesc(t("settings_export_desc"))
.setDesc(t("settings_export_desc"));
importExportDivSetting1.settingEl.addClass("setting-need-wrapping");
importExportDivSetting1
.addButton(async (button) => {
button.setButtonText(t("settings_export_all_but_oauth2_button"));
button.setButtonText(t("settings_export_basic_and_advanced_button"));
button.onClick(async () => {
new ExportSettingsQrCodeModal(
this.app,
this.plugin,
"all_but_oauth2"
"basic_and_advanced"
).open();
});
})
.addButton(async (button) => {
button.setButtonText(t("settings_export_s3_button"));
button.onClick(async () => {
new ExportSettingsQrCodeModal(this.app, this.plugin, "s3").open();
});
})
.addButton(async (button) => {
button.setButtonText(t("settings_export_dropbox_button"));
button.onClick(async () => {
@ -2155,6 +2396,28 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
"onedrive"
).open();
});
})
.addButton(async (button) => {
button.setButtonText(t("settings_export_webdav_button"));
button.onClick(async () => {
new ExportSettingsQrCodeModal(this.app, this.plugin, "webdav").open();
});
})
.addButton(async (button) => {
button.setButtonText(t("settings_export_webdis_button"));
button.onClick(async () => {
new ExportSettingsQrCodeModal(this.app, this.plugin, "webdis").open();
});
})
.addButton(async (button) => {
button.setButtonText(t("settings_export_googledrive_button"));
button.onClick(async () => {
new ExportSettingsQrCodeModal(
this.app,
this.plugin,
"googledrive"
).open();
});
});
let importSettingVal = "";
@ -2209,6 +2472,21 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
});
});
//////////////////////////////////////////////////
// below for pro
//////////////////////////////////////////////////
const proDiv = containerEl.createEl("div");
generateProSettingsPart(
proDiv,
t,
this.app,
this.plugin,
() => this.plugin.saveSettings(),
googleDriveAllowedToUsedDiv,
googleDriveNotShowUpHintSetting
);
//////////////////////////////////////////////////
// below for debug
//////////////////////////////////////////////////
@ -2271,9 +2549,24 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
.setName(t("settings_viewconsolelog"))
.setDesc(stringToFragment(t("settings_viewconsolelog_desc")));
new Setting(debugDiv)
const debugDivExportSyncPlans = new Setting(debugDiv)
.setName(t("settings_syncplans"))
.setDesc(t("settings_syncplans_desc"))
.setDesc(t("settings_syncplans_desc"));
debugDivExportSyncPlans.settingEl.addClass("setting-need-wrapping");
debugDivExportSyncPlans
.addButton(async (button) => {
button.setButtonText(t("settings_syncplans_button_1_only_change"));
button.onClick(async () => {
await exportVaultSyncPlansToFiles(
this.plugin.db,
this.app.vault,
this.plugin.vaultRandomID,
1,
true
);
new Notice(t("settings_syncplans_notice"));
});
})
.addButton(async (button) => {
button.setButtonText(t("settings_syncplans_button_1"));
button.onClick(async () => {
@ -2281,7 +2574,8 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
this.plugin.db,
this.app.vault,
this.plugin.vaultRandomID,
1
1,
false
);
new Notice(t("settings_syncplans_notice"));
});
@ -2293,7 +2587,8 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
this.plugin.db,
this.app.vault,
this.plugin.vaultRandomID,
5
5,
false
);
new Notice(t("settings_syncplans_notice"));
});
@ -2305,7 +2600,8 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
this.plugin.db,
this.app.vault,
this.plugin.vaultRandomID,
-1
-1,
false
);
new Notice(t("settings_syncplans_notice"));
});
@ -2351,6 +2647,44 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
});
});
new Setting(debugDiv)
.setName(t("settings_profiler_enabledebugprint"))
.setDesc(t("settings_profiler_enabledebugprint_desc"))
.addDropdown((dropdown) => {
dropdown.addOption("enable", t("enable"));
dropdown.addOption("disable", t("disable"));
dropdown
.setValue(
this.plugin.settings.profiler?.enablePrinting ? "enable" : "disable"
)
.onChange(async (val: string) => {
if (this.plugin.settings.profiler === undefined) {
this.plugin.settings.profiler = DEFAULT_PROFILER_CONFIG;
}
this.plugin.settings.profiler.enablePrinting = val === "enable";
await this.plugin.saveSettings();
});
});
new Setting(debugDiv)
.setName(t("settings_profiler_recordsize"))
.setDesc(t("settings_profiler_recordsize_desc"))
.addDropdown((dropdown) => {
dropdown.addOption("enable", t("enable"));
dropdown.addOption("disable", t("disable"));
dropdown
.setValue(
this.plugin.settings.profiler?.recordSize ? "enable" : "disable"
)
.onChange(async (val: string) => {
if (this.plugin.settings.profiler === undefined) {
this.plugin.settings.profiler = DEFAULT_PROFILER_CONFIG;
}
this.plugin.settings.profiler.recordSize = val === "enable";
await this.plugin.saveSettings();
});
});
new Setting(debugDiv)
.setName(t("settings_outputbasepathvaultid"))
.setDesc(t("settings_outputbasepathvaultid_desc"))
@ -2376,7 +2710,7 @@ export class RemotelySaveSettingTab extends PluginSettingTab {
}
hide() {
let { containerEl } = this;
const { containerEl } = this;
containerEl.empty();
super.hide();
}

File diff suppressed because it is too large Load Diff

View File

@ -1,6 +1,6 @@
import { App, Modal, Notice, PluginSettingTab, Setting } from "obsidian";
import type RemotelySavePlugin from "./main"; // unavoidable
import { type App, Modal } from "obsidian";
import type { TransItemType } from "./i18n";
import type RemotelySavePlugin from "./main"; // unavoidable
import { stringToFragment } from "./misc";
@ -17,7 +17,7 @@ export class SyncAlgoV3Modal extends Modal {
this.requireUpdateAllDev = false;
}
onOpen() {
let { contentEl } = this;
const { contentEl } = this;
const t = (x: TransItemType, vars?: any) => {
return this.plugin.i18n.t(x, vars);
};
@ -112,7 +112,7 @@ export class SyncAlgoV3Modal extends Modal {
}
onClose() {
let { contentEl } = this;
const { contentEl } = this;
contentEl.empty();
if (this.agree) {
console.info("agree to use the new algorithm");

View File

@ -17,6 +17,10 @@
padding-top: 18px;
}
.settings-percentage-custom-hide {
display: none;
}
.s3-disclaimer {
font-weight: bold;
}
@ -61,6 +65,32 @@
display: none;
}
.webdis-disclaimer {
font-weight: bold;
}
.webdis-hide {
display: none;
}
.googledrive-disclaimer {
font-weight: bold;
}
.googledrive-hide {
display: none;
}
.googledrive-allow-to-use-hide {
display: none;
}
.googledrive-auth-button-hide {
display: none;
}
.googledrive-revoke-auth-button-hide {
display: none;
}
.qrcode-img {
width: 350px;
height: 350px;
@ -75,6 +105,22 @@
font-weight: bolder;
}
.setting-need-wrapping-mobile .setting-item-control {
flex-wrap: wrap;
.setting-need-wrapping .setting-item-control {
/* flex-wrap: wrap; */
display: grid;
}
.pro-disclaimer {
font-weight: bold;
}
.pro-hide {
display: none;
}
.pro-auth-button-hide {
display: none;
}
.pro-revoke-auth-button-hide {
display: none;
}

View File

@ -1,6 +1,6 @@
import { strict as assert } from "assert";
import { RemotelySavePluginSettings } from "../src/baseTypes";
import type { RemotelySavePluginSettings } from "../src/baseTypes";
import { messyConfigToNormal, normalConfigToMessy } from "../src/configPersist";
const DEFAULT_SETTINGS: RemotelySavePluginSettings = {
@ -16,6 +16,12 @@ const DEFAULT_SETTINGS: RemotelySavePluginSettings = {
onedrive: {
username: "test 🍎 emoji",
} as any,
webdis: {
address: "addr",
} as any,
googledrive: {
refreshToken: "xxx",
} as any,
password: "password",
serviceType: "s3",
currLogLevel: "info",

View File

@ -13,7 +13,7 @@ import {
import { base64ToBase64url, bufferToArrayBuffer } from "../src/misc";
describe("Encryption OpenSSL tests", () => {
beforeEach(function () {
beforeEach(() => {
global.window = {
crypto: require("crypto").webcrypto,
} as any;
@ -157,7 +157,7 @@ describe("Encryption OpenSSL tests", () => {
assert.throws(() => getSizeFromEncToOrig(14787231));
let { minSize, maxSize } = getSizeFromEncToOrig(14787232);
const { minSize, maxSize } = getSizeFromEncToOrig(14787232);
assert.ok(minSize <= 14787203 && 14787203 <= maxSize);
});
});

12
tests/fsWebdis.test.ts Normal file
View File

@ -0,0 +1,12 @@
import { strict as assert } from "assert";
import { getOrigPath } from "../src/fsWebdis";
describe("Webdis operations tests", () => {
it("should get orig keys correctly", () => {
const input = "rs:fs:v1:库名字/something dev.md:meta";
const output = getOrigPath(input, "库名字");
const expected = "something dev.md";
assert.equal(output, expected);
});
});

View File

@ -1,7 +1,7 @@
import { strict as assert } from "assert";
import {
type MetadataOnRemote,
isEqualMetadataOnRemote,
MetadataOnRemote,
} from "../src/metadataOnRemote";
describe("Metadata operations tests", () => {

View File

@ -179,7 +179,7 @@ describe("Misc: get dirname", () => {
});
describe("Misc: extract svg", () => {
beforeEach(function () {
beforeEach(() => {
const fakeBrowser = new JSDOM("");
global.window = fakeBrowser.window as any;
});
@ -286,6 +286,83 @@ describe("Misc: special char for dir", () => {
});
});
// splitFileSizeToChunkRanges(totalSize, chunkSize) is expected to return
// inclusive {start, end} byte ranges covering [0, totalSize), in order.
describe("Misc: split chunk ranges", () => {
  // Fix: test title typo "numner" -> "number".
  it("should fail on negative number", () => {
    // negative total size, or non-positive chunk size, are invalid inputs
    assert.throws(() => misc.splitFileSizeToChunkRanges(-1, 2));
    assert.throws(() => misc.splitFileSizeToChunkRanges(1, -1));
    assert.throws(() => misc.splitFileSizeToChunkRanges(1, 0));
  });

  it("should return nothing for 0 input", () => {
    // an empty file needs no chunks, regardless of chunk size
    let input: [number, number] = [0, 1];
    let output: any = [];
    assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));

    input = [0, 100];
    output = [];
    assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));
  });

  it("should return single item for 1 input", () => {
    // a 1-byte file is a single degenerate range [0, 0]
    let input: [number, number] = [1, 1];
    let output = [{ start: 0, end: 0 }];
    assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));

    input = [1, 100];
    output = [{ start: 0, end: 0 }];
    assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));
  });

  it("should return single item for larger or equal input", () => {
    // chunk size >= total size collapses to one full-file range
    let input: [number, number] = [10, 10];
    let output = [{ start: 0, end: 9 }];
    assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));

    input = [10, 21];
    output = [{ start: 0, end: 9 }];
    assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));
  });

  it("should return correct items for normal input", () => {
    // trailing partial chunk: 10 bytes in 9-byte chunks -> 9 + 1
    let input: [number, number] = [10, 9];
    let output = [
      { start: 0, end: 8 },
      { start: 9, end: 9 },
    ];
    assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));

    // exact division: 10 bytes in 5-byte chunks -> two equal ranges
    input = [10, 5];
    output = [
      { start: 0, end: 4 },
      { start: 5, end: 9 },
    ];
    assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));

    // chunk size 1 yields one range per byte
    input = [3, 1];
    output = [
      { start: 0, end: 0 },
      { start: 1, end: 1 },
      { start: 2, end: 2 },
    ];
    assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));

    input = [15, 5];
    output = [
      { start: 0, end: 4 },
      { start: 5, end: 9 },
      { start: 10, end: 14 },
    ];
    assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));

    // non-round sizes: 1024 bytes in 578-byte chunks -> 578 + 446
    input = [1024, 578];
    output = [
      { start: 0, end: 577 },
      { start: 578, end: 1023 },
    ];
    assert.deepStrictEqual(output, misc.splitFileSizeToChunkRanges(...input));
  });
});
describe("Misc: Dropbox: should fix the folder name cases", () => {
it("should do nothing on empty folders", () => {
const input: any[] = [];

View File

@ -6,6 +6,11 @@ const TerserPlugin = require("terser-webpack-plugin");
const DEFAULT_DROPBOX_APP_KEY = process.env.DROPBOX_APP_KEY || "";
const DEFAULT_ONEDRIVE_CLIENT_ID = process.env.ONEDRIVE_CLIENT_ID || "";
const DEFAULT_ONEDRIVE_AUTHORITY = process.env.ONEDRIVE_AUTHORITY || "";
const DEFAULT_REMOTELYSAVE_WEBSITE = process.env.REMOTELYSAVE_WEBSITE || "";
const DEFAULT_REMOTELYSAVE_CLIENT_ID = process.env.REMOTELYSAVE_CLIENT_ID || "";
const DEFAULT_GOOGLEDRIVE_CLIENT_ID = process.env.GOOGLEDRIVE_CLIENT_ID || "";
const DEFAULT_GOOGLEDRIVE_CLIENT_SECRET =
process.env.GOOGLEDRIVE_CLIENT_SECRET || "";
module.exports = {
entry: "./src/main.ts",
@ -20,6 +25,10 @@ module.exports = {
"process.env.DEFAULT_DROPBOX_APP_KEY": `"${DEFAULT_DROPBOX_APP_KEY}"`,
"process.env.DEFAULT_ONEDRIVE_CLIENT_ID": `"${DEFAULT_ONEDRIVE_CLIENT_ID}"`,
"process.env.DEFAULT_ONEDRIVE_AUTHORITY": `"${DEFAULT_ONEDRIVE_AUTHORITY}"`,
"process.env.DEFAULT_REMOTELYSAVE_WEBSITE": `"${DEFAULT_REMOTELYSAVE_WEBSITE}"`,
"process.env.DEFAULT_REMOTELYSAVE_CLIENT_ID": `"${DEFAULT_REMOTELYSAVE_CLIENT_ID}"`,
"process.env.DEFAULT_GOOGLEDRIVE_CLIENT_ID": `"${DEFAULT_GOOGLEDRIVE_CLIENT_ID}"`,
"process.env.DEFAULT_GOOGLEDRIVE_CLIENT_SECRET": `"${DEFAULT_GOOGLEDRIVE_CLIENT_SECRET}"`,
}),
// Work around for Buffer is undefined:
// https://github.com/webpack/changelog-v5/issues/10