Merge remote-tracking branch 'origin/v1' into websocket-proxy

pull/1536/head
FabianLars 2 weeks ago
commit 21e0c5ce5c
No known key found for this signature in database

@ -33,15 +33,6 @@
} }
}, },
"packages": { "packages": {
"authenticator": {
"path": "./plugins/authenticator",
"manager": "rust-disabled"
},
"authenticator-js": {
"path": "./plugins/authenticator",
"manager": "javascript-disabled"
},
"autostart": { "autostart": {
"path": "./plugins/autostart", "path": "./plugins/autostart",
"manager": "rust-disabled" "manager": "rust-disabled"

@ -42,7 +42,7 @@ jobs:
node-version: 20 node-version: 20
- uses: pnpm/action-setup@v2 - uses: pnpm/action-setup@v2
with: with:
version: 9.x.x version: 10.x.x
run_install: true run_install: true
- name: audit - name: audit
run: pnpm audit run: pnpm audit

@ -7,7 +7,7 @@ on:
jobs: jobs:
version-or-publish: version-or-publish:
runs-on: ubuntu-latest runs-on: ubuntu-22.04
timeout-minutes: 65 timeout-minutes: 65
outputs: outputs:
change: ${{ steps.covector.outputs.change }} change: ${{ steps.covector.outputs.change }}
@ -26,13 +26,13 @@ jobs:
- uses: pnpm/action-setup@v2 - uses: pnpm/action-setup@v2
with: with:
version: 9.x.x version: 10.x.x
run_install: true run_install: true
- name: install webkit2gtk and libudev for [authenticator] - name: install webkit2gtk
run: | run: |
sudo apt-get update sudo apt-get update
sudo apt-get install -y libwebkit2gtk-4.0-dev libwebkit2gtk-4.1-dev libudev-dev sudo apt-get install -y libwebkit2gtk-4.0-dev libwebkit2gtk-4.1-dev
- name: cargo login - name: cargo login
run: cargo login ${{ secrets.ORG_CRATES_IO_TOKEN }} run: cargo login ${{ secrets.ORG_CRATES_IO_TOKEN }}

@ -45,7 +45,7 @@ jobs:
node-version: 20 node-version: 20
- uses: pnpm/action-setup@v2 - uses: pnpm/action-setup@v2
with: with:
version: 9.x.x version: 10.x.x
run_install: true run_install: true
- name: eslint - name: eslint
run: pnpm lint run: pnpm lint
@ -65,7 +65,7 @@ jobs:
node-version: 20 node-version: 20
- uses: pnpm/action-setup@v2 - uses: pnpm/action-setup@v2
with: with:
version: 9.x.x version: 10.x.x
run_install: true run_install: true
- name: prettier check - name: prettier check
run: pnpm format-check run: pnpm format-check

@ -24,17 +24,17 @@ concurrency:
jobs: jobs:
clippy: clippy:
runs-on: ubuntu-latest runs-on: ubuntu-22.04
strategy: strategy:
fail-fast: false fail-fast: false
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- name: install webkit2gtk and libudev for [authenticator] - name: install webkit2gtk
run: | run: |
sudo apt-get update sudo apt-get update
sudo apt-get install -y libwebkit2gtk-4.0-dev libwebkit2gtk-4.1-dev libudev-dev sudo apt-get install -y libwebkit2gtk-4.0-dev libwebkit2gtk-4.1-dev
- name: Install clippy with stable toolchain - name: Install clippy with stable toolchain
uses: dtolnay/rust-toolchain@stable uses: dtolnay/rust-toolchain@stable

@ -26,26 +26,26 @@ concurrency:
jobs: jobs:
msrv: msrv:
runs-on: ubuntu-latest runs-on: ubuntu-22.04
strategy: strategy:
fail-fast: false fail-fast: false
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- name: install webkit2gtk and libudev for [authenticator] - name: install webkit2gtk
run: | run: |
sudo apt-get update sudo apt-get update
sudo apt-get install -y libwebkit2gtk-4.0-dev libwebkit2gtk-4.1-dev libudev-dev sudo apt-get install -y libwebkit2gtk-4.0-dev libwebkit2gtk-4.1-dev
- uses: dtolnay/rust-toolchain@1.64.0 - uses: dtolnay/rust-toolchain@1.67.0
- uses: Swatinem/rust-cache@v2 - uses: Swatinem/rust-cache@v2
- name: build - name: build
run: cargo build --workspace --exclude 'tauri-plugin-sql' --all-targets --all-features run: cargo build --workspace --exclude 'tauri-plugin-sql' --all-targets --all-features
- uses: dtolnay/rust-toolchain@1.65.0 - uses: dtolnay/rust-toolchain@1.80.1
- name: build sql:sqlite - name: build sql:sqlite
run: cargo build --package 'tauri-plugin-sql' --all-targets --features sqlite run: cargo build --package 'tauri-plugin-sql' --all-targets --features sqlite

@ -34,7 +34,7 @@ jobs:
- uses: pnpm/action-setup@v2 - uses: pnpm/action-setup@v2
with: with:
version: 9.x.x version: 10.x.x
run_install: true run_install: true
- name: Build packages - name: Build packages

656
Cargo.lock generated

File diff suppressed because it is too large Load Diff

@ -1,7 +1,6 @@
[workspace] [workspace]
# Listed without globs to prevent issues with renovate's baseBranches config. # Listed without globs to prevent issues with renovate's baseBranches config.
members = [ members = [
"plugins/authenticator",
"plugins/autostart", "plugins/autostart",
"plugins/fs-extra", "plugins/fs-extra",
"plugins/fs-watch", "plugins/fs-watch",
@ -34,4 +33,4 @@ thiserror = "1"
authors = ["Tauri Programme within The Commons Conservancy"] authors = ["Tauri Programme within The Commons Conservancy"]
edition = "2021" edition = "2021"
license = "Apache-2.0 OR MIT" license = "Apache-2.0 OR MIT"
rust-version = "1.64" rust-version = "1.67"

@ -2,7 +2,6 @@
| | | Win | Mac | Lin | iOS | And | | | | Win | Mac | Lin | iOS | And |
| ------------------------------------------ | --------------------------------------------------------- | --- | --- | --- | --- | --- | | ------------------------------------------ | --------------------------------------------------------- | --- | --- | --- | --- | --- |
| [authenticator](plugins/authenticator) | Interface with hardware security keys. | ✅ | ✅ | ✅ | ? | ? |
| [autostart](plugins/autostart) | Automatically launch your app at system startup. | ✅ | ✅ | ✅ | ? | ? | | [autostart](plugins/autostart) | Automatically launch your app at system startup. | ✅ | ✅ | ✅ | ? | ? |
| [fs-extra](plugins/fs-extra) | File system methods that aren't included in the core API. | ✅ | ✅ | ✅ | ? | ? | | [fs-extra](plugins/fs-extra) | File system methods that aren't included in the core API. | ✅ | ✅ | ✅ | ? | ? |
| [fs-watch](plugins/fs-watch) | Watch the filesystem for changes. | ✅ | ✅ | ✅ | ? | ? | | [fs-watch](plugins/fs-watch) | Watch the filesystem for changes. | ✅ | ✅ | ✅ | ? | ? |
@ -18,7 +17,7 @@
| [websocket](plugins/websocket) | Open a WebSocket connection using a Rust client in JS. | ✅ | ✅ | ✅ | ? | ? | | [websocket](plugins/websocket) | Open a WebSocket connection using a Rust client in JS. | ✅ | ✅ | ✅ | ? | ? |
| [window-state](plugins/window-state) | Persist window sizes and positions. | ✅ | ✅ | ✅ | ? | ? | | [window-state](plugins/window-state) | Persist window sizes and positions. | ✅ | ✅ | ✅ | ? | ? |
_This repo and all plugins require a Rust version of at least **1.64**_ _This repo and all plugins require a Rust version of at least **1.67**_
## Partners ## Partners

@ -10,24 +10,23 @@
"format-check": "prettier --check ." "format-check": "prettier --check ."
}, },
"devDependencies": { "devDependencies": {
"@eslint/js": "9.6.0", "@eslint/js": "9.30.0",
"@rollup/plugin-node-resolve": "15.2.3", "@rollup/plugin-node-resolve": "16.0.1",
"@rollup/plugin-terser": "0.4.4", "@rollup/plugin-terser": "0.4.4",
"@rollup/plugin-typescript": "11.1.6", "@rollup/plugin-typescript": "11.1.6",
"@types/eslint__js": "8.42.3", "eslint": "9.30.0",
"eslint": "9.6.0", "eslint-config-prettier": "10.1.5",
"eslint-config-prettier": "9.1.0", "prettier": "3.6.2",
"prettier": "3.3.2", "rollup": "4.44.1",
"rollup": "4.18.1", "typescript": "5.8.3",
"typescript": "5.5.3", "typescript-eslint": "8.35.1"
"typescript-eslint": "rc-v8"
}, },
"resolutions": { "resolutions": {
"semver": ">=7.5.2", "semver": ">=7.5.2",
"optionator": ">=0.9.3" "optionator": ">=0.9.3"
}, },
"engines": { "engines": {
"pnpm": "^9.0.0" "pnpm": "^10.0.0"
}, },
"pnpm": { "pnpm": {
"auditConfig": { "auditConfig": {

@ -1 +0,0 @@
node_modules

@ -1,29 +0,0 @@
[package]
name = "tauri-plugin-authenticator"
version = "0.0.0"
description = "Use hardware security-keys in your Tauri App."
authors = { workspace = true }
license = { workspace = true }
edition = { workspace = true }
rust-version = { workspace = true }
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
serde = { workspace = true }
serde_json = { workspace = true }
tauri = { workspace = true }
log = { workspace = true }
thiserror = { workspace = true }
authenticator = "0.3.1"
once_cell = "1"
sha2 = "0.10"
base64 = "0.22"
chrono = "0.4"
bytes = "1"
byteorder = "1"
openssl = "0.10"
[dev-dependencies]
rand = "0.8"
rusty-fork = "0.3"

@ -1,20 +0,0 @@
SPDXVersion: SPDX-2.1
DataLicense: CC0-1.0
PackageName: tauri
DataFormat: SPDXRef-1
PackageSupplier: Organization: The Tauri Programme in the Commons Conservancy
PackageHomePage: https://tauri.app
PackageLicenseDeclared: Apache-2.0
PackageLicenseDeclared: MIT
PackageCopyrightText: 2019-2022, The Tauri Programme in the Commons Conservancy
PackageSummary: <text>Tauri is a rust project that enables developers to make secure
and small desktop applications using a web frontend.
</text>
PackageComment: <text>The package includes the following libraries; see
Relationship information.
</text>
Created: 2019-05-20T09:00:00Z
PackageDownloadLocation: git://github.com/tauri-apps/tauri
PackageDownloadLocation: git+https://github.com/tauri-apps/tauri.git
PackageDownloadLocation: git+ssh://github.com/tauri-apps/tauri.git
Creator: Person: Daniel Thompson-Yvetot

@ -1,177 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS

@ -1,21 +0,0 @@
MIT License
Copyright (c) 2017 - Present Tauri Apps Contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@ -1,126 +0,0 @@
![plugin-authenticator](https://github.com/tauri-apps/plugins-workspace/raw/v1/plugins/authenticator/banner.png)
Use hardware security-keys in your Tauri App.
## Install
_This plugin requires a Rust version of at least **1.64**_
There are three general methods of installation that we can recommend.
1. Use crates.io and npm (easiest and requires you to trust that our publishing pipeline worked)
2. Pull sources directly from Github using git tags / revision hashes (most secure)
3. Git submodule install this repo in your tauri project and then use the file protocol to ingest the source (most secure, but inconvenient to use)
Install the authenticator plugin by adding the following lines to your `Cargo.toml` file:
`src-tauri/Cargo.toml`
```toml
[dependencies]
tauri-plugin-authenticator = "0.1"
# or through git
tauri-plugin-authenticator = { git = "https://github.com/tauri-apps/plugins-workspace", branch = "v1" }
```
You can install the JavaScript Guest bindings using your preferred JavaScript package manager:
> Note: Since most JavaScript package managers are unable to install packages from git monorepos we provide read-only mirrors of each plugin. This makes installation option 2 more ergonomic to use.
```sh
pnpm add https://github.com/tauri-apps/tauri-plugin-authenticator#v1
# or
npm add https://github.com/tauri-apps/tauri-plugin-authenticator#v1
# or
yarn add https://github.com/tauri-apps/tauri-plugin-authenticator#v1
```
## Usage
First, you need to register the authenticator plugin with Tauri:
`src-tauri/src/main.rs`
```rust
fn main() {
tauri::Builder::default()
.plugin(tauri_plugin_authenticator::init())
.run(tauri::generate_context!())
.expect("error while running tauri application");
}
```
Afterwards, all the plugin's APIs are available through the JavaScript guest bindings:
```javascript
import { Authenticator } from "tauri-plugin-authenticator-api";
const auth = new Authenticator();
auth.init(); // initialize transports
// generate a 32-bytes long random challenge
const arr = new Uint32Array(32);
window.crypto.getRandomValues(arr);
const b64 = btoa(String.fromCharCode.apply(null, arr));
// web-safe base64
const challenge = b64.replace(/\+/g, "-").replace(/\//g, "_");
const domain = "https://tauri.app";
// attempt to register with the security key
const json = await auth.register(challenge, domain);
const registerResult = JSON.parse(json);
// verify the registration was successful
const r2 = await auth.verifyRegistration(
challenge,
app,
registerResult.registerData,
registerResult.clientData,
);
const j2 = JSON.parse(r2);
// sign some data
const json = await auth.sign(challenge, app, keyHandle);
const signData = JSON.parse(json);
// verify the signature again
const counter = await auth.verifySignature(
challenge,
app,
signData.signData,
clientData,
keyHandle,
pubkey,
);
if (counter && counter > 0) {
console.log("SUCCESS!");
}
```
## Contributing
PRs accepted. Please make sure to read the Contributing Guide before making a pull request.
## Partners
<table>
<tbody>
<tr>
<td align="center" valign="middle">
<a href="https://crabnebula.dev" target="_blank">
<img src="https://github.com/tauri-apps/plugins-workspace/raw/v1/.github/sponsors/crabnebula.svg" alt="CrabNebula" width="283">
</a>
</td>
</tr>
</tbody>
</table>
For the complete list of sponsors please visit our [website](https://tauri.app#sponsors) and [Open Collective](https://opencollective.com/tauri).
## License
Code: (c) 2015 - Present - The Tauri Programme within The Commons Conservancy.
MIT or MIT/Apache 2.0 where applicable.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 44 KiB

@ -1,60 +0,0 @@
import { invoke } from "@tauri-apps/api/tauri";
export class Authenticator {
async init(): Promise<void> {
return await invoke("plugin:authenticator|init_auth");
}
async register(challenge: string, application: string): Promise<string> {
return await invoke("plugin:authenticator|register", {
timeout: 10000,
challenge,
application,
});
}
async verifyRegistration(
challenge: string,
application: string,
registerData: string,
clientData: string,
): Promise<string> {
return await invoke("plugin:authenticator|verify_registration", {
challenge,
application,
registerData,
clientData,
});
}
async sign(
challenge: string,
application: string,
keyHandle: string,
): Promise<string> {
return await invoke("plugin:authenticator|sign", {
timeout: 10000,
challenge,
application,
keyHandle,
});
}
async verifySignature(
challenge: string,
application: string,
signData: string,
clientData: string,
keyHandle: string,
pubkey: string,
): Promise<number> {
return await invoke("plugin:authenticator|verify_signature", {
challenge,
application,
signData,
clientData,
keyHandle,
pubkey,
});
}
}

@ -1,33 +0,0 @@
{
"name": "tauri-plugin-authenticator-api",
"version": "0.0.0",
"description": "Use hardware security-keys in your Tauri App.",
"license": "MIT or APACHE-2.0",
"authors": [
"Tauri Programme within The Commons Conservancy"
],
"type": "module",
"browser": "dist-js/index.min.js",
"module": "dist-js/index.mjs",
"types": "dist-js/index.d.ts",
"exports": {
"import": "./dist-js/index.mjs",
"types": "./dist-js/index.d.ts",
"browser": "./dist-js/index.min.js"
},
"scripts": {
"build": "rollup -c"
},
"files": [
"dist-js",
"!dist-js/**/*.map",
"README.md",
"LICENSE"
],
"devDependencies": {
"tslib": "2.6.3"
},
"dependencies": {
"@tauri-apps/api": "1.6.0"
}
}

@ -1,11 +0,0 @@
import { readFileSync } from "fs";
import { createConfig } from "../../shared/rollup.config.mjs";
export default createConfig({
input: "guest-js/index.ts",
pkg: JSON.parse(
readFileSync(new URL("./package.json", import.meta.url), "utf8"),
),
external: [/^@tauri-apps\/api/],
});

@ -1,212 +0,0 @@
// Copyright 2021 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use authenticator::{
authenticatorservice::AuthenticatorService, statecallback::StateCallback,
AuthenticatorTransports, KeyHandle, RegisterFlags, SignFlags, StatusUpdate,
};
use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine};
use once_cell::sync::Lazy;
use serde::Serialize;
use sha2::{Digest, Sha256};
use std::io;
use std::sync::mpsc::channel;
use std::{convert::Into, sync::Mutex};
static MANAGER: Lazy<Mutex<AuthenticatorService>> = Lazy::new(|| {
let manager = AuthenticatorService::new().expect("The auth service should initialize safely");
Mutex::new(manager)
});
pub fn init_usb() {
let mut manager = MANAGER.lock().unwrap();
// theres also "add_detected_transports()" in the docs?
manager.add_u2f_usb_hid_platform_transports();
}
#[derive(Serialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Registration {
pub key_handle: String,
pub pubkey: String,
pub register_data: String,
pub client_data: String,
}
pub fn register(application: String, timeout: u64, challenge: String) -> crate::Result<String> {
let (chall_bytes, app_bytes, client_data_string) =
format_client_data(application.as_str(), challenge.as_str());
// log the status rx?
let (status_tx, _status_rx) = channel::<StatusUpdate>();
let mut manager = MANAGER.lock().unwrap();
let (register_tx, register_rx) = channel();
let callback = StateCallback::new(Box::new(move |rv| {
register_tx.send(rv).unwrap();
}));
let res = manager.register(
RegisterFlags::empty(),
timeout,
chall_bytes,
app_bytes,
vec![],
status_tx,
callback,
);
match res {
Ok(_r) => {
let register_result = register_rx
.recv()
.expect("Problem receiving, unable to continue");
if let Err(e) = register_result {
return Err(e.into());
}
let (register_data, device_info) = register_result.unwrap(); // error already has been checked
// println!("Register result: {}", base64::encode(&register_data));
println!("Device info: {}", &device_info);
let (key_handle, public_key) =
_u2f_get_key_handle_and_public_key_from_register_response(&register_data).unwrap();
let key_handle_base64 = URL_SAFE_NO_PAD.encode(key_handle);
let public_key_base64 = URL_SAFE_NO_PAD.encode(public_key);
let register_data_base64 = URL_SAFE_NO_PAD.encode(&register_data);
println!("Key Handle: {}", &key_handle_base64);
println!("Public Key: {}", &public_key_base64);
// Ok(base64::encode(&register_data))
// Ok(key_handle_base64)
let res = serde_json::to_string(&Registration {
key_handle: key_handle_base64,
pubkey: public_key_base64,
register_data: register_data_base64,
client_data: client_data_string,
})?;
Ok(res)
}
Err(e) => Err(e.into()),
}
}
#[derive(Serialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Signature {
pub key_handle: String,
pub sign_data: String,
}
pub fn sign(
application: String,
timeout: u64,
challenge: String,
key_handle: String,
) -> crate::Result<String> {
let credential = match URL_SAFE_NO_PAD.decode(key_handle) {
Ok(v) => v,
Err(e) => {
return Err(e.into());
}
};
let key_handle = KeyHandle {
credential,
transports: AuthenticatorTransports::empty(),
};
let (chall_bytes, app_bytes, _) = format_client_data(application.as_str(), challenge.as_str());
let (sign_tx, sign_rx) = channel();
let callback = StateCallback::new(Box::new(move |rv| {
sign_tx.send(rv).unwrap();
}));
// log the status rx?
let (status_tx, _status_rx) = channel::<StatusUpdate>();
let mut manager = MANAGER.lock().unwrap();
let res = manager.sign(
SignFlags::empty(),
timeout,
chall_bytes,
vec![app_bytes],
vec![key_handle],
status_tx,
callback,
);
match res {
Ok(_v) => {
let sign_result = sign_rx
.recv()
.expect("Problem receiving, unable to continue");
if let Err(e) = sign_result {
return Err(e.into());
}
let (_, handle_used, sign_data, device_info) = sign_result.unwrap();
let sig = URL_SAFE_NO_PAD.encode(sign_data);
println!("Sign result: {sig}");
println!("Key handle used: {}", URL_SAFE_NO_PAD.encode(&handle_used));
println!("Device info: {}", &device_info);
println!("Done.");
let res = serde_json::to_string(&Signature {
sign_data: sig,
key_handle: URL_SAFE_NO_PAD.encode(&handle_used),
})?;
Ok(res)
}
Err(e) => Err(e.into()),
}
}
fn format_client_data(application: &str, challenge: &str) -> (Vec<u8>, Vec<u8>, String) {
let d =
format!(r#"{{"challenge": "{challenge}", "version": "U2F_V2", "appId": "{application}"}}"#);
let mut challenge = Sha256::new();
challenge.update(d.as_bytes());
let chall_bytes = challenge.finalize().to_vec();
let mut app = Sha256::new();
app.update(application.as_bytes());
let app_bytes = app.finalize().to_vec();
(chall_bytes, app_bytes, d)
}
fn _u2f_get_key_handle_and_public_key_from_register_response(
register_response: &[u8],
) -> io::Result<(Vec<u8>, Vec<u8>)> {
if register_response[0] != 0x05 {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"Reserved byte not set correctly",
));
}
// 1: reserved
// 65: public key
// 1: key handle length
// key handle
// x.509 cert
// sig
let key_handle_len = register_response[66] as usize;
let mut public_key = register_response.to_owned();
let mut key_handle = public_key.split_off(67);
let _attestation = key_handle.split_off(key_handle_len);
// remove fist (reserved) and last (handle len) bytes
let pk: Vec<u8> = public_key[1..public_key.len() - 1].to_vec();
Ok((key_handle, pk))
}

@ -1,22 +0,0 @@
use serde::{Serialize, Serializer};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error(transparent)]
Base64Decode(#[from] base64::DecodeError),
#[error(transparent)]
JSON(#[from] serde_json::Error),
#[error(transparent)]
U2F(#[from] crate::u2f_crate::u2ferror::U2fError),
#[error(transparent)]
Auth(#[from] authenticator::errors::AuthenticatorError),
}
impl Serialize for Error {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(self.to_string().as_ref())
}
}

@ -1,77 +0,0 @@
// Copyright 2021 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
mod auth;
mod error;
mod u2f;
mod u2f_crate;
use tauri::{
plugin::{Builder as PluginBuilder, TauriPlugin},
Runtime,
};
pub use error::Error;
type Result<T> = std::result::Result<T, Error>;
#[tauri::command]
fn init_auth() {
auth::init_usb();
}
#[tauri::command]
fn register(timeout: u64, challenge: String, application: String) -> crate::Result<String> {
auth::register(application, timeout, challenge)
}
#[tauri::command]
fn verify_registration(
challenge: String,
application: String,
register_data: String,
client_data: String,
) -> crate::Result<String> {
u2f::verify_registration(application, challenge, register_data, client_data)
}
/// Command: asks the token to sign `challenge` for the key identified by
/// `key_handle`, delegating to `auth::sign` (arguments reordered to
/// `(application, timeout, challenge, key_handle)`).
#[tauri::command]
fn sign(
    timeout: u64,
    challenge: String,
    application: String,
    key_handle: String,
) -> crate::Result<String> {
    auth::sign(application, timeout, challenge, key_handle)
}
/// Command: verifies a sign (authentication) response, delegating to
/// `u2f::verify_signature`. Returns the token's new usage counter on success.
#[tauri::command]
fn verify_signature(
    challenge: String,
    application: String,
    sign_data: String,
    client_data: String,
    key_handle: String,
    pubkey: String,
) -> crate::Result<u32> {
    u2f::verify_signature(
        application,
        challenge,
        sign_data,
        client_data,
        key_handle,
        pubkey,
    )
}
/// Initializes the Tauri plugin, registering all authenticator commands under
/// the `authenticator` plugin namespace.
pub fn init<R: Runtime>() -> TauriPlugin<R> {
    PluginBuilder::new("authenticator")
        .invoke_handler(tauri::generate_handler![
            init_auth,
            register,
            verify_registration,
            sign,
            verify_signature
        ])
        .build()
}

@ -1,105 +0,0 @@
// Copyright 2021 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use crate::u2f_crate::messages::*;
use crate::u2f_crate::protocol::*;
use crate::u2f_crate::register::*;
use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine};
use chrono::prelude::*;
use serde::Serialize;
use std::convert::Into;
/// U2F protocol version string reported in registration responses.
static VERSION: &str = "U2F_V2";
/// Builds a [`Challenge`] for `app_id`, base64url-encoding (unpadded)
/// `challenge_bytes` and stamping it with the current UTC time, which the
/// 300-second expiry check in `protocol.rs` later parses back.
pub fn make_challenge(app_id: &str, challenge_bytes: Vec<u8>) -> Challenge {
    let utc: DateTime<Utc> = Utc::now();

    Challenge {
        challenge: URL_SAFE_NO_PAD.encode(challenge_bytes),
        // `Debug` formatting of `DateTime<Utc>` round-trips through
        // `str::parse::<DateTime<Utc>>()` in `util::expiration`.
        timestamp: format!("{utc:?}"),
        app_id: app_id.to_string(),
    }
}
/// Successful registration result, serialized to JSON for the webview.
#[derive(Serialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct RegistrationVerification {
    /// Base64url-encoded (unpadded) key handle.
    pub key_handle: String,
    /// Base64url-encoded (unpadded) uncompressed public key (65 bytes raw).
    pub pubkey: String,
    /// Common name extracted from the attestation certificate, if present.
    pub device_name: Option<String>,
}
/// Verifies a U2F registration response against the original challenge.
///
/// `challenge` is the base64url-encoded (unpadded) challenge bytes originally
/// sent to the token; `register_data` / `client_data` come from the browser's
/// U2F API response.
///
/// # Errors
/// Fails if the challenge does not decode, the response does not verify
/// (`U2fError`), or the result cannot be serialized to JSON.
///
/// # Returns
/// A JSON-serialized [`RegistrationVerification`].
pub fn verify_registration(
    app_id: String,
    challenge: String,
    register_data: String,
    client_data: String,
) -> crate::Result<String> {
    let challenge_bytes = URL_SAFE_NO_PAD.decode(challenge)?;
    let challenge = make_challenge(&app_id, challenge_bytes);
    // `register_response` expects the client data re-encoded as base64url.
    let client_data_base64 = URL_SAFE_NO_PAD.encode(client_data.as_bytes());
    let client = U2f::new(app_id);
    // Propagate `U2fError` with `?` (converted via `crate::Error`) instead of
    // matching and re-wrapping by hand.
    let registration = client.register_response(
        challenge,
        RegisterResponse {
            registration_data: register_data,
            client_data: client_data_base64,
            version: VERSION.to_string(),
        },
    )?;
    let rv = RegistrationVerification {
        key_handle: URL_SAFE_NO_PAD.encode(&registration.key_handle),
        pubkey: URL_SAFE_NO_PAD.encode(&registration.pub_key),
        device_name: registration.device_name,
    };
    Ok(serde_json::to_string(&rv)?)
}
/// Result payload of a successful signature verification.
#[derive(Serialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct SignatureVerification {
    /// The token's usage counter. U2F counters are 32-bit values — see
    /// `Authorization::counter` and `verify_signature`'s `u32` return type;
    /// the previous `u8` field would have truncated any counter above 255.
    pub counter: u32,
}
/// Verifies a U2F authentication (sign) response.
///
/// All binary inputs are base64url-encoded without padding.
///
/// # Errors
/// Fails if any payload does not decode or the signature does not verify.
///
/// # Returns
/// The token's new usage counter.
pub fn verify_signature(
    app_id: String,
    challenge: String,
    sign_data: String,
    client_data: String,
    key_handle: String,
    pub_key: String,
) -> crate::Result<u32> {
    let challenge_bytes = URL_SAFE_NO_PAD.decode(challenge)?;
    let chal = make_challenge(&app_id, challenge_bytes);
    let client_data_base64 = URL_SAFE_NO_PAD.encode(client_data.as_bytes());
    let key_handle_bytes = URL_SAFE_NO_PAD.decode(&key_handle)?;
    let pubkey_bytes = URL_SAFE_NO_PAD.decode(pub_key)?;
    let client = U2f::new(app_id);
    // This plugin does not persist counters, so the replay check inside
    // `sign_response` runs against a stored value of 0.
    let counter = client.sign_response(
        chal,
        Registration {
            // Verification only needs the public key and key handle.
            key_handle: key_handle_bytes,
            pub_key: pubkey_bytes,
            attestation_cert: None,
            device_name: None,
        },
        SignResponse {
            signature_data: sign_data,
            client_data: client_data_base64,
            key_handle,
        },
        0,
    )?;
    Ok(counter)
}

@ -1,8 +0,0 @@
Copyright (c) 2017
Licensed under either of
* Apache License, Version 2.0, (http://www.apache.org/licenses/LICENSE-2.0)
* MIT license (http://opensource.org/licenses/MIT)
at your option.

@ -1,65 +0,0 @@
// Copyright 2021 Flavio Oliveira
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use bytes::{Buf, BufMut};
use openssl::sha::sha256;
use serde::Serialize;
use std::io::Cursor;
use crate::u2f_crate::u2ferror::U2fError;
/// The `Result` type used in this crate.
type Result<T> = ::std::result::Result<T, U2fError>;
/// Outcome of a verified sign response.
#[derive(Serialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Authorization {
    /// The token's 32-bit usage counter from the sign response.
    pub counter: u32,
    /// Whether the user-presence flag was reported set.
    pub user_presence: bool,
}
/// Parses and verifies a raw U2F authentication (sign) response.
///
/// `sign_data` layout: user-presence flag (1 byte) | counter (4 bytes,
/// big-endian) | DER-encoded ECDSA signature.
///
/// # Errors
/// [`U2fError::InvalidSignatureData`] for truncated input,
/// [`U2fError::BadSignature`] if the signature does not verify, plus any
/// OpenSSL error from key reconstruction.
pub fn parse_sign_response(
    app_id: String,
    client_data: Vec<u8>,
    public_key: Vec<u8>,
    sign_data: Vec<u8>,
) -> Result<Authorization> {
    // Need at least flag (1) + counter (4) + a non-empty signature.
    if sign_data.len() <= 5 {
        return Err(U2fError::InvalidSignatureData);
    }

    let user_presence_flag = &sign_data[0];
    let counter = &sign_data[1..=4];
    let signature = &sign_data[5..];

    // Rebuild the signed message:
    // sha256(app_id) | user-presence flag | counter | sha256(client_data).
    let app_id_hash = sha256(&app_id.into_bytes());
    let client_data_hash = sha256(&client_data[..]);

    let mut msg = vec![];
    msg.put(app_id_hash.as_ref());
    msg.put_u8(*user_presence_flag);
    msg.put(counter);
    msg.put(client_data_hash.as_ref());

    let public_key = super::crypto::NISTP256Key::from_bytes(&public_key)?;

    // The signature is to be verified by the relying party using the public key obtained during registration.
    let verified = public_key.verify_signature(signature, msg.as_ref())?;
    if !verified {
        return Err(U2fError::BadSignature);
    }

    // NOTE(review): `user_presence` is reported as `true` without testing the
    // presence bit (0x01) of `user_presence_flag` — confirm this is intended.
    let authorization = Authorization {
        counter: get_counter(counter),
        user_presence: true,
    };

    Ok(authorization)
}
/// Decodes the 4-byte big-endian counter field into a `u32`.
///
/// Panics if `counter` holds fewer than four bytes — the same behavior as the
/// previous `Cursor`/`Buf::get_u32` implementation. Extra trailing bytes are
/// ignored.
fn get_counter(counter: &[u8]) -> u32 {
    let word: [u8; 4] = counter[..4]
        .try_into()
        .expect("counter slice holds at least four bytes");
    u32::from_be_bytes(word)
}

@ -1,156 +0,0 @@
// Copyright 2021 Flavio Oliveira
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
//! Cryptographic operation wrapper for Webauthn. This module exists to
//! allow ease of auditing, safe operation wrappers for the webauthn library,
//! and cryptographic provider abstraction. This module currently uses OpenSSL
//! as the cryptographic primitive provider.
// Source can be found here: https://github.com/Firstyear/webauthn-rs/blob/master/src/crypto.rs
#![allow(non_camel_case_types)]
use openssl::{bn, ec, hash, nid, sign, x509};
use std::convert::TryFrom;
// use super::constants::*;
use crate::u2f_crate::u2ferror::U2fError;
use openssl::pkey::Public;
// use super::proto::*;
// Why OpenSSL over another rust crate?
// - Well, the openssl crate allows us to reconstruct a public key from the
// x/y group coords, where most others want a pkcs formatted structure. As
// a result, it's easiest to use openssl as it gives us exactly what we need
// for these operations, and despite it's many challenges as a library, it
// has resources and investment into it's maintenance, so we can a least
// assert a higher level of confidence in it that <backyard crypto here>.
// Object({Integer(-3): Bytes([48, 185, 178, 204, 113, 186, 105, 138, 190, 33, 160, 46, 131, 253, 100, 177, 91, 243, 126, 128, 245, 119, 209, 59, 186, 41, 215, 196, 24, 222, 46, 102]), Integer(-2): Bytes([158, 212, 171, 234, 165, 197, 86, 55, 141, 122, 253, 6, 92, 242, 242, 114, 158, 221, 238, 163, 127, 214, 120, 157, 145, 226, 232, 250, 144, 150, 218, 138]), Integer(-1): U64(1), Integer(1): U64(2), Integer(3): I64(-7)})
//
/// An X509PublicKey. This is what is otherwise known as a public certificate
/// which comprises a public key and other signed metadata related to the issuer
/// of the key.
/// Wrapper around a parsed X.509 attestation certificate (which carries the
/// attestation public key and signed issuer metadata).
pub struct X509PublicKey {
    pubk: x509::X509,
}
/// Manual `Debug` impl: prints only the type name; the wrapped certificate
/// contents are intentionally not formatted.
impl std::fmt::Debug for X509PublicKey {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "X509PublicKey")
    }
}
/// Parses a certificate from raw bytes.
impl TryFrom<&[u8]> for X509PublicKey {
    type Error = U2fError;

    // Must be DER bytes. If you have PEM, base64decode first!
    fn try_from(d: &[u8]) -> Result<Self, Self::Error> {
        let pubk = x509::X509::from_der(d)?;
        Ok(X509PublicKey { pubk })
    }
}
impl X509PublicKey {
    /// Extracts the certificate subject's common name (CN), if present and
    /// valid UTF-8.
    pub(crate) fn common_name(&self) -> Option<String> {
        self.pubk
            .subject_name()
            .entries_by_nid(openssl::nid::Nid::COMMONNAME)
            .next()
            .and_then(|entry| std::str::from_utf8(entry.data().as_slice()).ok())
            .map(|name| name.to_string())
    }

    /// Returns whether the certificate's public key lies on the NIST P-256
    /// (prime256v1 / secp256r1) curve.
    pub(crate) fn is_secp256r1(&self) -> Result<bool, U2fError> {
        // Extract and sanity-check the embedded EC public key.
        let ec_key = self.pubk.public_key()?.ec_key()?;
        ec_key.check_key()?;

        let curve = ec_key
            .group()
            .curve_name()
            .ok_or(U2fError::OpenSSLNoCurveName)?;
        Ok(curve == nid::Nid::X9_62_PRIME256V1)
    }

    /// Verifies `signature` over `verification_data` with this certificate's
    /// public key.
    ///
    /// TODO: should the digest be derived from the x509 cert instead of
    /// hard-coding SHA-256?
    pub(crate) fn verify_signature(
        &self,
        signature: &[u8],
        verification_data: &[u8],
    ) -> Result<bool, U2fError> {
        let pkey = self.pubk.public_key()?;
        let mut verifier = sign::Verifier::new(hash::MessageDigest::sha256(), &pkey)?;
        verifier.update(verification_data)?;
        Ok(verifier.verify(signature)?)
    }
}
/// A NIST P-256 public key stored as its raw affine coordinates.
pub struct NISTP256Key {
    /// The key's public X coordinate.
    pub x: [u8; 32],
    /// The key's public Y coordinate.
    pub y: [u8; 32],
}
impl NISTP256Key {
    /// Parses an uncompressed SEC1 point (`0x04 || X || Y`, exactly 65 bytes)
    /// into its coordinate pair.
    ///
    /// # Errors
    /// [`U2fError::InvalidPublicKey`] for any other length or leading byte.
    pub fn from_bytes(public_key_bytes: &[u8]) -> Result<Self, U2fError> {
        // Only the uncompressed form is accepted: a 0x04 tag followed by two
        // 32-byte coordinates.
        if public_key_bytes.len() != 65 || public_key_bytes[0] != 0x04 {
            return Err(U2fError::InvalidPublicKey);
        }

        let mut x = [0u8; 32];
        let mut y = [0u8; 32];
        x.copy_from_slice(&public_key_bytes[1..33]);
        y.copy_from_slice(&public_key_bytes[33..65]);

        Ok(NISTP256Key { x, y })
    }

    /// Rebuilds an OpenSSL P-256 key from the stored affine coordinates.
    fn get_key(&self) -> Result<ec::EcKey<Public>, U2fError> {
        let group = ec::EcGroup::from_curve_name(openssl::nid::Nid::X9_62_PRIME256V1)?;
        let xbn = bn::BigNum::from_slice(&self.x)?;
        let ybn = bn::BigNum::from_slice(&self.y)?;

        let key = openssl::ec::EcKey::from_public_key_affine_coordinates(&group, &xbn, &ybn)?;

        // `check_key` confirms the reconstructed point actually lies on the
        // curve before we use it.
        key.check_key()?;

        Ok(key)
    }

    /// Verifies a DER-encoded ECDSA signature over the SHA-256 hash of
    /// `verification_data`.
    pub fn verify_signature(
        &self,
        signature: &[u8],
        verification_data: &[u8],
    ) -> Result<bool, U2fError> {
        let key = self.get_key()?;
        let sig = openssl::ecdsa::EcdsaSig::from_der(signature)?;
        let digest = openssl::sha::sha256(verification_data);
        Ok(sig.verify(digest.as_ref(), &key)?)
    }
}

@ -1,54 +0,0 @@
// Copyright 2021 Flavio Oliveira
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
// As defined by FIDO U2F Javascript API.
// https://fidoalliance.org/specs/fido-u2f-v1.0-nfc-bt-amendment-20150514/fido-u2f-javascript-api.html#registration
use serde::{Deserialize, Serialize};
/// Registration request payload for the FIDO U2F JavaScript API.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct U2fRegisterRequest {
    pub app_id: String,
    /// One entry per protocol version offered to the token.
    pub register_requests: Vec<RegisterRequest>,
    /// Keys already registered with this app.
    pub registered_keys: Vec<RegisteredKey>,
}
/// A single versioned registration request (version + challenge).
#[derive(Serialize)]
pub struct RegisterRequest {
    pub version: String,
    pub challenge: String,
}
/// Descriptor of an already-registered key, as used in sign/register requests.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct RegisteredKey {
    pub version: String,
    /// Base64url-encoded key handle (see `get_registered_key`).
    pub key_handle: Option<String>,
    pub app_id: String,
}
/// Browser response to a registration request; the base64url-encoded blobs
/// are decoded and verified by `U2f::register_response`.
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RegisterResponse {
    pub registration_data: String,
    pub version: String,
    pub client_data: String,
}
/// Sign (authentication) request payload for the FIDO U2F JavaScript API.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct U2fSignRequest {
    pub app_id: String,
    pub challenge: String,
    /// Candidate keys the token may sign with.
    pub registered_keys: Vec<RegisteredKey>,
}
/// Browser response to a sign request; verified by `U2f::sign_response`.
#[derive(Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct SignResponse {
    pub key_handle: String,
    pub signature_data: String,
    pub client_data: String,
}

@ -1,12 +0,0 @@
// Copyright 2021 Flavio Oliveira
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
mod util;
pub mod authorization;
mod crypto;
pub mod messages;
pub mod protocol;
pub mod register;
pub mod u2ferror;

@ -1,191 +0,0 @@
// Copyright 2021 Flavio Oliveira
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use crate::u2f_crate::authorization::*;
use crate::u2f_crate::messages::*;
use crate::u2f_crate::register::*;
use crate::u2f_crate::u2ferror::U2fError;
use crate::u2f_crate::util::*;
use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine};
use chrono::prelude::*;
use chrono::Duration;
use serde::{Deserialize, Serialize};
type Result<T> = ::std::result::Result<T, U2fError>;
/// Stateless protocol helper bound to a single U2F application identifier.
#[derive(Clone)]
pub struct U2f {
    app_id: String,
}
/// A timestamped challenge tied to an app ID; the timestamp drives the
/// 300-second expiry check in `register_response`/`sign_response`.
#[derive(Deserialize, Serialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Challenge {
    pub app_id: String,
    /// Base64url-encoded (unpadded) challenge bytes.
    pub challenge: String,
    /// `Debug`-formatted UTC instant, parsed back by `util::expiration`.
    pub timestamp: String,
}
impl Challenge {
    // Not used in this plugin.
    /// Creates an empty placeholder challenge with all fields blank.
    #[allow(dead_code)]
    pub fn new() -> Self {
        Challenge {
            app_id: String::new(),
            challenge: String::new(),
            timestamp: String::new(),
        }
    }
}
impl U2f {
// The app ID is a string used to uniquely identify an U2F app
pub fn new(app_id: String) -> Self {
U2f { app_id }
}
// Not used in this plugin.
#[allow(dead_code)]
pub fn generate_challenge(&self) -> Result<Challenge> {
let utc: DateTime<Utc> = Utc::now();
let challenge_bytes = generate_challenge(32)?;
let challenge = Challenge {
challenge: URL_SAFE_NO_PAD.encode(challenge_bytes),
timestamp: format!("{:?}", utc),
app_id: self.app_id.clone(),
};
Ok(challenge.clone())
}
// Not used in this plugin.
#[allow(dead_code)]
pub fn request(
&self,
challenge: Challenge,
registrations: Vec<Registration>,
) -> Result<U2fRegisterRequest> {
let u2f_request = U2fRegisterRequest {
app_id: self.app_id.clone(),
register_requests: self.register_request(challenge),
registered_keys: self.registered_keys(registrations),
};
Ok(u2f_request)
}
fn register_request(&self, challenge: Challenge) -> Vec<RegisterRequest> {
let mut requests: Vec<RegisterRequest> = vec![];
let request = RegisterRequest {
version: U2F_V2.into(),
challenge: challenge.challenge,
};
requests.push(request);
requests
}
pub fn register_response(
&self,
challenge: Challenge,
response: RegisterResponse,
) -> Result<Registration> {
if expiration(challenge.timestamp) > Duration::seconds(300) {
return Err(U2fError::ChallengeExpired);
}
let registration_data: Vec<u8> = URL_SAFE_NO_PAD
.decode(&response.registration_data[..])
.unwrap();
let client_data: Vec<u8> = URL_SAFE_NO_PAD.decode(&response.client_data[..]).unwrap();
parse_registration(challenge.app_id, client_data, registration_data)
}
fn registered_keys(&self, registrations: Vec<Registration>) -> Vec<RegisteredKey> {
let mut keys: Vec<RegisteredKey> = vec![];
for registration in registrations {
keys.push(get_registered_key(
self.app_id.clone(),
registration.key_handle,
));
}
keys
}
// Not used in this plugin.
#[allow(dead_code)]
pub fn sign_request(
&self,
challenge: Challenge,
registrations: Vec<Registration>,
) -> U2fSignRequest {
let mut keys: Vec<RegisteredKey> = vec![];
for registration in registrations {
keys.push(get_registered_key(
self.app_id.clone(),
registration.key_handle,
));
}
let signed_request = U2fSignRequest {
app_id: self.app_id.clone(),
challenge: URL_SAFE_NO_PAD.encode(challenge.challenge.as_bytes()),
registered_keys: keys,
};
signed_request
}
pub fn sign_response(
&self,
challenge: Challenge,
reg: Registration,
sign_resp: SignResponse,
counter: u32,
) -> Result<u32> {
if expiration(challenge.timestamp) > Duration::seconds(300) {
return Err(U2fError::ChallengeExpired);
}
if sign_resp.key_handle != get_encoded(&reg.key_handle[..]) {
return Err(U2fError::WrongKeyHandler);
}
let client_data: Vec<u8> = URL_SAFE_NO_PAD
.decode(&sign_resp.client_data[..])
.map_err(|_e| U2fError::InvalidClientData)?;
let sign_data: Vec<u8> = URL_SAFE_NO_PAD
.decode(&sign_resp.signature_data[..])
.map_err(|_e| U2fError::InvalidSignatureData)?;
let public_key = reg.pub_key;
let auth = parse_sign_response(
self.app_id.clone(),
client_data.clone(),
public_key,
sign_data.clone(),
);
match auth {
Ok(ref res) => {
// CounterTooLow is raised when the counter value received from the device is
// lower than last stored counter value.
if res.counter < counter {
Err(U2fError::CounterTooLow)
} else {
Ok(res.counter)
}
}
Err(e) => Err(e),
}
}
}

@ -1,101 +0,0 @@
// Copyright 2021 Flavio Oliveira
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use byteorder::{BigEndian, ByteOrder};
use bytes::{BufMut, Bytes};
use openssl::sha::sha256;
use serde::Serialize;
use crate::u2f_crate::messages::RegisteredKey;
use crate::u2f_crate::u2ferror::U2fError;
use crate::u2f_crate::util::*;
use std::convert::TryFrom;
/// The `Result` type used in this crate.
type Result<T> = ::std::result::Result<T, U2fError>;
// Single enrolment or pairing between an application and a token.
// Single enrolment or pairing between an application and a token.
#[derive(Serialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Registration {
    /// Raw key handle bytes as parsed from the registration message.
    pub key_handle: Vec<u8>,
    /// Uncompressed public key (65 bytes: 0x04 || X || Y).
    pub pub_key: Vec<u8>,

    // AttestationCert can be null for Authenticate requests.
    pub attestation_cert: Option<Vec<u8>>,
    /// Common name from the attestation certificate, if any.
    pub device_name: Option<String>,
}
/// Parses and verifies a raw U2F registration message.
///
/// Message layout (FIDO U2F raw message format):
/// reserved byte (0x05) | public key (65) | key handle length (1) |
/// key handle | X.509 attestation certificate (DER) | signature.
///
/// # Errors
/// [`U2fError::InvalidReservedByte`] for malformed or truncated input,
/// [`U2fError::Asm1DecoderError`] if the certificate length cannot be parsed,
/// and [`U2fError::BadCertificate`] for certificate/verification failures.
pub fn parse_registration(
    app_id: String,
    client_data: Vec<u8>,
    registration_data: Vec<u8>,
) -> Result<Registration> {
    // Guard against truncated input before any fixed-offset reads
    // (1 reserved byte + 65-byte public key + 1-byte key handle length);
    // the old code indexed/`split_to` unchecked and could panic.
    if registration_data.len() < 67 {
        return Err(U2fError::InvalidReservedByte);
    }
    if registration_data[0] != 0x05 {
        return Err(U2fError::InvalidReservedByte);
    }

    let mut mem = Bytes::from(registration_data);

    // Start parsing ... advance past the reserved byte.
    let _ = mem.split_to(1);

    // P-256 NIST elliptic curve point (uncompressed, 65 bytes).
    let public_key = mem.split_to(65);

    // Key handle (length-prefixed).
    let key_handle_size = mem.split_to(1);
    let key_len = BigEndian::read_uint(&key_handle_size[..], 1) as usize;
    if mem.len() < key_len {
        return Err(U2fError::InvalidReservedByte);
    }
    let key_handle = mem.split_to(key_len);

    // The certificate length needs to be inferred by parsing its ASN.1
    // header; propagate decode errors instead of unwrapping (old code
    // panicked on malformed certificates).
    let cert_len = asn_length(mem.clone())?;
    if mem.len() < cert_len {
        return Err(U2fError::Asm1DecoderError);
    }
    let attestation_certificate = mem.split_to(cert_len);

    // Remaining data corresponds to the signature.
    let signature = mem;

    // Rebuild the signed message:
    // 0x00 | sha256(app_id) | sha256(client_data) | key handle | public key.
    let app_id_hash = sha256(&app_id.into_bytes());
    let client_data_hash = sha256(&client_data[..]);

    let mut msg = vec![0x00]; // A byte reserved for future use [1 byte] with the value 0x00
    msg.put(app_id_hash.as_ref());
    msg.put(client_data_hash.as_ref());
    msg.put(key_handle.clone());
    msg.put(public_key.clone());

    // The signature is to be verified by the relying party using the public
    // key certified in the attestation certificate.
    let certificate_public_key =
        super::crypto::X509PublicKey::try_from(&attestation_certificate[..])?;

    if !(certificate_public_key.is_secp256r1()?) {
        return Err(U2fError::BadCertificate);
    }

    // NOTE(review): a failed signature check also maps to `BadCertificate`
    // here (not `BadSignature`); kept as-is for backward compatibility.
    let verified = certificate_public_key.verify_signature(&signature[..], &msg[..])?;
    if !verified {
        return Err(U2fError::BadCertificate);
    }

    Ok(Registration {
        key_handle: key_handle[..].to_vec(),
        pub_key: public_key[..].to_vec(),
        attestation_cert: Some(attestation_certificate[..].to_vec()),
        device_name: certificate_public_key.common_name(),
    })
}
/// Builds a browser-facing key descriptor from a stored key handle,
/// base64url-encoding the handle.
pub fn get_registered_key(app_id: String, key_handle: Vec<u8>) -> RegisteredKey {
    RegisteredKey {
        app_id,
        version: U2F_V2.into(),
        key_handle: Some(get_encoded(key_handle.as_slice())),
    }
}

@ -1,39 +0,0 @@
// Copyright 2021 Flavio Oliveira
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use thiserror::Error;
/// Errors produced by the vendored U2F protocol implementation.
#[derive(Debug, Error)]
pub enum U2fError {
    #[error("ASM1 Decoder error")]
    Asm1DecoderError,
    #[error("Not able to verify signature")]
    BadSignature,
    #[error("Not able to generate random bytes")]
    RandomSecureBytesError,
    #[error("Invalid Reserved Byte")]
    InvalidReservedByte,
    #[error("Challenge Expired")]
    ChallengeExpired,
    #[error("Wrong Key Handler")]
    WrongKeyHandler,
    #[error("Invalid Client Data")]
    InvalidClientData,
    #[error("Invalid Signature Data")]
    InvalidSignatureData,
    #[error("Invalid User Presence Byte")]
    InvalidUserPresenceByte,
    #[error("Failed to parse certificate")]
    BadCertificate,
    #[error("Not Trusted Anchor")]
    NotTrustedAnchor,
    #[error("Counter too low")]
    CounterTooLow,
    // The display messages of the next two variants were previously swapped
    // ("Invalid public key" on OpenSSLNoCurveName and vice versa).
    #[error("OpenSSL no curve name")]
    OpenSSLNoCurveName,
    #[error("Invalid public key")]
    InvalidPublicKey,
    #[error(transparent)]
    OpenSSLError(#[from] openssl::error::ErrorStack),
}

@ -1,66 +0,0 @@
// Copyright 2021 Flavio Oliveira
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use crate::u2f_crate::u2ferror::U2fError;
use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine};
use bytes::Bytes;
use chrono::prelude::*;
use chrono::Duration;
use openssl::rand;
/// The `Result` type used in this crate.
type Result<T> = ::std::result::Result<T, U2fError>;
/// U2F protocol version string, as defined by the FIDO U2F specification.
pub const U2F_V2: &str = "U2F_V2";
/// Generates a `size`-byte challenge from a secure, random source
/// (OpenSSL's CSPRNG).
///
/// # Errors
/// Returns [`U2fError::RandomSecureBytesError`] if the RNG fails.
pub fn generate_challenge(size: usize) -> Result<Vec<u8>> {
    let mut bytes: Vec<u8> = vec![0; size];
    rand::rand_bytes(&mut bytes).map_err(|_e| U2fError::RandomSecureBytesError)?;
    Ok(bytes)
}
pub fn expiration(timestamp: String) -> Duration {
let now: DateTime<Utc> = Utc::now();
let ts = timestamp.parse::<DateTime<Utc>>();
now.signed_duration_since(ts.unwrap())
}
// Decode initial bytes of buffer as ASN and return the length of the encoded structure.
// http://en.wikipedia.org/wiki/X.690
pub fn asn_length(mem: Bytes) -> Result<usize> {
    let buffer: &[u8] = &mem[..];

    // Need at least the type byte (0x30, SEQUENCE) and one length byte.
    if mem.len() < 2 || buffer[0] != 0x30 {
        // Type
        return Err(U2fError::Asm1DecoderError);
    }

    let len = buffer[1]; // Len
    if len & 0x80 == 0 {
        // Short form: the low 7 bits are the content length.
        // NOTE(review): unlike the long form below, this does not include the
        // 2 header bytes; kept as-is for backward compatibility (attestation
        // certificates always exceed 127 bytes, so this branch is not hit for
        // them).
        return Ok((len & 0x7f) as usize);
    }

    // Long form: the low 7 bits give the number of subsequent length bytes.
    let number_of_bytes = (len & 0x7f) as usize;
    if number_of_bytes == 0 {
        return Err(U2fError::Asm1DecoderError);
    }
    // Bounds-check before indexing — the old code could panic on truncated
    // input.
    if buffer.len() < 2 + number_of_bytes {
        return Err(U2fError::Asm1DecoderError);
    }

    // Big-endian accumulate the content length.
    let mut length: usize = 0;
    for &byte in &buffer[2..2 + number_of_bytes] {
        length = length * 0x100 + byte as usize;
    }

    // Total = content length + length bytes + the 2 initial bytes (type, len).
    Ok(length + number_of_bytes + 2)
}
/// Encodes `data` as base64url without padding.
///
/// The `URL_SAFE_NO_PAD` engine never emits `=` characters, so the previous
/// `trim_end_matches('=')` pass (and its extra `String` allocation) was a
/// no-op and has been removed.
pub fn get_encoded(data: &[u8]) -> String {
    URL_SAFE_NO_PAD.encode(data)
}

@ -1,4 +0,0 @@
{
"extends": "../../tsconfig.base.json",
"include": ["guest-js/*.ts"]
}

@ -4,7 +4,7 @@ Automatically launch your application at startup. Supports Windows, Mac (via App
## Install ## Install
_This plugin requires a Rust version of at least **1.64**_ _This plugin requires a Rust version of at least **1.67**_
There are three general methods of installation that we can recommend. There are three general methods of installation that we can recommend.

@ -24,7 +24,7 @@
"LICENSE" "LICENSE"
], ],
"devDependencies": { "devDependencies": {
"tslib": "2.6.3" "tslib": "2.8.1"
}, },
"dependencies": { "dependencies": {
"@tauri-apps/api": "1.6.0" "@tauri-apps/api": "1.6.0"

@ -4,7 +4,7 @@ Additional file system methods not included in the core API.
## Install ## Install
_This plugin requires a Rust version of at least **1.64**_ _This plugin requires a Rust version of at least **1.67**_
There are three general methods of installation that we can recommend. There are three general methods of installation that we can recommend.

@ -25,7 +25,7 @@
"LICENSE" "LICENSE"
], ],
"devDependencies": { "devDependencies": {
"tslib": "2.6.3" "tslib": "2.8.1"
}, },
"dependencies": { "dependencies": {
"@tauri-apps/api": "1.6.0" "@tauri-apps/api": "1.6.0"

@ -78,9 +78,10 @@ struct Metadata {
} }
fn system_time_to_ms(time: std::io::Result<SystemTime>) -> u64 { fn system_time_to_ms(time: std::io::Result<SystemTime>) -> u64 {
time.map(|t| { time.map(|time| {
let duration_since_epoch = t.duration_since(UNIX_EPOCH).unwrap(); time.duration_since(UNIX_EPOCH)
duration_since_epoch.as_millis() as u64 .map(|t| t.as_millis() as u64)
.unwrap_or_else(|err| err.duration().as_millis() as u64)
}) })
.unwrap_or_default() .unwrap_or_default()
} }

@ -4,7 +4,7 @@ Watch files and directories for changes using [notify](https://github.com/notify
## Install ## Install
_This plugin requires a Rust version of at least **1.64**_ _This plugin requires a Rust version of at least **1.67**_
There are three general methods of installation that we can recommend. There are three general methods of installation that we can recommend.

@ -25,7 +25,7 @@
"LICENSE" "LICENSE"
], ],
"devDependencies": { "devDependencies": {
"tslib": "2.6.3" "tslib": "2.8.1"
}, },
"dependencies": { "dependencies": {
"@tauri-apps/api": "1.6.0" "@tauri-apps/api": "1.6.0"

@ -6,7 +6,7 @@ Expose your apps assets through a localhost server instead of the default custom
## Install ## Install
_This plugin requires a Rust version of at least **1.64**_ _This plugin requires a Rust version of at least **1.67**_
There are three general methods of installation that we can recommend. There are three general methods of installation that we can recommend.

@ -63,7 +63,7 @@ impl Builder {
let asset_resolver = app.asset_resolver(); let asset_resolver = app.asset_resolver();
std::thread::spawn(move || { std::thread::spawn(move || {
let server = let server =
Server::http(&format!("localhost:{port}")).expect("Unable to spawn server"); Server::http(format!("localhost:{port}")).expect("Unable to spawn server");
for req in server.incoming_requests() { for req in server.incoming_requests() {
let path = req let path = req
.url() .url()

@ -4,7 +4,7 @@ Configurable logging for your Tauri app.
## Install ## Install
_This plugin requires a Rust version of at least **1.64**_ _This plugin requires a Rust version of at least **1.67**_
There are three general methods of installation that we can recommend. There are three general methods of installation that we can recommend.

@ -25,7 +25,7 @@
"LICENSE" "LICENSE"
], ],
"devDependencies": { "devDependencies": {
"tslib": "2.6.3" "tslib": "2.8.1"
}, },
"dependencies": { "dependencies": {
"@tauri-apps/api": "1.6.0" "@tauri-apps/api": "1.6.0"

@ -1,4 +1,3 @@
authenticator
autostart autostart
fs-extra fs-extra
fs-watch fs-watch

@ -12,6 +12,6 @@
## \[0.1.1] ## \[0.1.1]
- The MSRV was raised to 1.64! - The MSRV was raised to 1.67!
- The plugin now recursively unescapes saved patterns before allowing/forbidding them. This effectively prevents `.persisted-scope` files from blowing up, which caused Out-Of-Memory issues, while automatically fixing existing broken files seamlessly. - The plugin now recursively unescapes saved patterns before allowing/forbidding them. This effectively prevents `.persisted-scope` files from blowing up, which caused Out-Of-Memory issues, while automatically fixing existing broken files seamlessly.
- [ebb2eb2](https://github.com/tauri-apps/plugins-workspace/commit/ebb2eb2fe2ebfbb70530d16a983d396aa5829aa1) fix(persisted-scope): Prevent out-of-memory issues, fixes [#274](https://github.com/tauri-apps/plugins-workspace/pull/274) ([#328](https://github.com/tauri-apps/plugins-workspace/pull/328)) on 2023-04-26 - [ebb2eb2](https://github.com/tauri-apps/plugins-workspace/commit/ebb2eb2fe2ebfbb70530d16a983d396aa5829aa1) fix(persisted-scope): Prevent out-of-memory issues, fixes [#274](https://github.com/tauri-apps/plugins-workspace/pull/274) ([#328](https://github.com/tauri-apps/plugins-workspace/pull/328)) on 2023-04-26

@ -4,7 +4,7 @@ Save filesystem and asset scopes and restore them when the app is reopened.
## Install ## Install
_This plugin requires a Rust version of at least **1.64**_ _This plugin requires a Rust version of at least **1.67**_
There are three general methods of installation that we can recommend. There are three general methods of installation that we can recommend.

@ -6,7 +6,7 @@ This plugin is a port of [electron-positioner](https://github.com/jenslind/elect
## Install ## Install
_This plugin requires a Rust version of at least **1.64**_ _This plugin requires a Rust version of at least **1.67**_
There are three general methods of installation that we can recommend. There are three general methods of installation that we can recommend.

@ -25,7 +25,7 @@
"LICENSE" "LICENSE"
], ],
"devDependencies": { "devDependencies": {
"tslib": "2.6.3" "tslib": "2.8.1"
}, },
"dependencies": { "dependencies": {
"@tauri-apps/api": "1.6.0" "@tauri-apps/api": "1.6.0"

@ -19,7 +19,7 @@ thiserror = { workspace = true }
semver = { version = "1", optional = true } semver = { version = "1", optional = true }
[target.'cfg(target_os = "windows")'.dependencies.windows-sys] [target.'cfg(target_os = "windows")'.dependencies.windows-sys]
version = "0.52" version = "0.59"
features = [ features = [
"Win32_System_Threading", "Win32_System_Threading",
"Win32_System_DataExchange", "Win32_System_DataExchange",

@ -4,7 +4,7 @@ Ensure a single instance of your tauri app is running.
## Install ## Install
_This plugin requires a Rust version of at least **1.64**_ _This plugin requires a Rust version of at least **1.67**_
There are three general methods of installation that we can recommend. There are three general methods of installation that we can recommend.

@ -9,6 +9,6 @@
"author": "", "author": "",
"license": "MIT", "license": "MIT",
"devDependencies": { "devDependencies": {
"@tauri-apps/cli": "1.6.0" "@tauri-apps/cli": "1.6.3"
} }
} }

@ -1,5 +1,3 @@
#![cfg(target_os = "linux")]
use std::sync::Arc; use std::sync::Arc;
#[cfg(feature = "semver")] #[cfg(feature = "semver")]

@ -1,12 +1,10 @@
#![cfg(target_os = "macos")] use crate::SingleInstanceCallback;
use tauri::{
use crate::SingleInstanceCallback; plugin::{self, TauriPlugin},
use tauri::{ Manager, Runtime,
plugin::{self, TauriPlugin}, };
Manager, Runtime, pub fn init<R: Runtime>(_f: Box<SingleInstanceCallback<R>>) -> TauriPlugin<R> {
}; plugin::Builder::new("single-instance").build()
pub fn init<R: Runtime>(_f: Box<SingleInstanceCallback<R>>) -> TauriPlugin<R> { }
plugin::Builder::new("single-instance").build()
} pub fn destroy<R: Runtime, M: Manager<R>>(_manager: &M) {}
pub fn destroy<R: Runtime, M: Manager<R>>(_manager: &M) {}

@ -1,5 +1,3 @@
#![cfg(target_os = "windows")]
#[cfg(feature = "semver")] #[cfg(feature = "semver")]
use crate::semver_compat::semver_compat_string; use crate::semver_compat::semver_compat_string;
@ -51,7 +49,7 @@ pub fn init<R: Runtime>(f: Box<SingleInstanceCallback<R>>) -> TauriPlugin<R> {
unsafe { unsafe {
let hwnd = FindWindowW(class_name.as_ptr(), window_name.as_ptr()); let hwnd = FindWindowW(class_name.as_ptr(), window_name.as_ptr());
if hwnd != 0 { if !hwnd.is_null() {
let data = format!( let data = format!(
"{}|{}\0", "{}|{}\0",
std::env::current_dir() std::env::current_dir()
@ -71,7 +69,7 @@ pub fn init<R: Runtime>(f: Box<SingleInstanceCallback<R>>) -> TauriPlugin<R> {
} }
} }
} else { } else {
app.manage(MutexHandle(hmutex)); app.manage(MutexHandle(hmutex as _));
let hwnd = create_event_target_window::<R>(&class_name, &window_name); let hwnd = create_event_target_window::<R>(&class_name, &window_name);
unsafe { unsafe {
@ -82,7 +80,7 @@ pub fn init<R: Runtime>(f: Box<SingleInstanceCallback<R>>) -> TauriPlugin<R> {
) )
}; };
app.manage(TargetWindowHandle(hwnd)); app.manage(TargetWindowHandle(hwnd as _));
} }
Ok(()) Ok(())
@ -98,12 +96,12 @@ pub fn init<R: Runtime>(f: Box<SingleInstanceCallback<R>>) -> TauriPlugin<R> {
pub fn destroy<R: Runtime, M: Manager<R>>(manager: &M) { pub fn destroy<R: Runtime, M: Manager<R>>(manager: &M) {
if let Some(hmutex) = manager.try_state::<MutexHandle>() { if let Some(hmutex) = manager.try_state::<MutexHandle>() {
unsafe { unsafe {
ReleaseMutex(hmutex.0); ReleaseMutex(hmutex.0 as _);
CloseHandle(hmutex.0); CloseHandle(hmutex.0 as _);
} }
} }
if let Some(hwnd) = manager.try_state::<TargetWindowHandle>() { if let Some(hwnd) = manager.try_state::<TargetWindowHandle>() {
unsafe { DestroyWindow(hwnd.0) }; unsafe { DestroyWindow(hwnd.0 as _) };
} }
} }
@ -115,6 +113,9 @@ unsafe extern "system" fn single_instance_window_proc<R: Runtime>(
) -> LRESULT { ) -> LRESULT {
let data_ptr = GetWindowLongPtrW(hwnd, GWL_USERDATA) let data_ptr = GetWindowLongPtrW(hwnd, GWL_USERDATA)
as *mut (AppHandle<R>, Box<SingleInstanceCallback<R>>); as *mut (AppHandle<R>, Box<SingleInstanceCallback<R>>);
if data_ptr.is_null() {
return DefWindowProcW(hwnd, msg, wparam, lparam);
}
let (app_handle, callback) = &mut *data_ptr; let (app_handle, callback) = &mut *data_ptr;
match msg { match msg {
@ -147,12 +148,12 @@ fn create_event_target_window<R: Runtime>(class_name: &[u16], window_name: &[u16
cbClsExtra: 0, cbClsExtra: 0,
cbWndExtra: 0, cbWndExtra: 0,
hInstance: GetModuleHandleW(std::ptr::null()), hInstance: GetModuleHandleW(std::ptr::null()),
hIcon: 0, hIcon: std::ptr::null_mut(),
hCursor: 0, hCursor: std::ptr::null_mut(),
hbrBackground: 0, hbrBackground: std::ptr::null_mut(),
lpszMenuName: std::ptr::null(), lpszMenuName: std::ptr::null(),
lpszClassName: class_name.as_ptr(), lpszClassName: class_name.as_ptr(),
hIconSm: 0, hIconSm: std::ptr::null_mut(),
}; };
RegisterClassExW(&class); RegisterClassExW(&class);
@ -176,8 +177,8 @@ fn create_event_target_window<R: Runtime>(class_name: &[u16], window_name: &[u16
0, 0,
0, 0,
0, 0,
0, std::ptr::null_mut(),
0, std::ptr::null_mut(),
GetModuleHandleW(std::ptr::null()), GetModuleHandleW(std::ptr::null()),
std::ptr::null(), std::ptr::null(),
); );

@ -1,5 +1,3 @@
#![cfg(feature = "semver")]
/// Takes a version and spits out a String with trailing _x, thus only considering the digits /// Takes a version and spits out a String with trailing _x, thus only considering the digits
/// relevant regarding semver compatibility /// relevant regarding semver compatibility
pub fn semver_compat_string(version: semver::Version) -> String { pub fn semver_compat_string(version: semver::Version) -> String {

@ -6,7 +6,7 @@ authors = { workspace = true }
license = { workspace = true } license = { workspace = true }
edition = { workspace = true } edition = { workspace = true }
#rust-version = { workspace = true } #rust-version = { workspace = true }
rust-version = "1.65" rust-version = "1.80"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
@ -17,7 +17,7 @@ tauri = { workspace = true }
log = { workspace = true } log = { workspace = true }
thiserror = { workspace = true } thiserror = { workspace = true }
futures-core = "0.3" futures-core = "0.3"
sqlx = { version = "0.7", features = ["json", "time"] } sqlx = { version = "0.8", features = ["json", "time"] }
time = "0.3" time = "0.3"
tokio = { version = "1", features = ["sync"] } tokio = { version = "1", features = ["sync"] }

@ -4,7 +4,7 @@ Interface with SQL databases through [sqlx](https://github.com/launchbadge/sqlx)
## Install ## Install
_This plugin requires a Rust version of at least **1.65**_ _This plugin requires a Rust version of at least **1.80**_
There are three general methods of installation that we can recommend. There are three general methods of installation that we can recommend.

@ -25,7 +25,7 @@
"LICENSE" "LICENSE"
], ],
"devDependencies": { "devDependencies": {
"tslib": "2.6.3" "tslib": "2.8.1"
}, },
"dependencies": { "dependencies": {
"@tauri-apps/api": "1.6.0" "@tauri-apps/api": "1.6.0"

@ -1,336 +1,337 @@
// Copyright 2021 Tauri Programme within The Commons Conservancy // Copyright 2021 Tauri Programme within The Commons Conservancy
// SPDX-License-Identifier: Apache-2.0 // SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT // SPDX-License-Identifier: MIT
use futures_core::future::BoxFuture; use futures_core::future::BoxFuture;
use serde::{ser::Serializer, Deserialize, Serialize}; use serde::{ser::Serializer, Deserialize, Serialize};
use serde_json::Value as JsonValue; use serde_json::Value as JsonValue;
use sqlx::{ use sqlx::{
error::BoxDynError, error::BoxDynError,
migrate::{ migrate::{
MigrateDatabase, Migration as SqlxMigration, MigrationSource, MigrationType, Migrator, MigrateDatabase, Migration as SqlxMigration, MigrationSource, MigrationType, Migrator,
}, },
Column, Pool, Row, Column, Pool, Row,
}; };
use tauri::{ use tauri::{
command, command,
plugin::{Builder as PluginBuilder, TauriPlugin}, plugin::{Builder as PluginBuilder, TauriPlugin},
AppHandle, Manager, RunEvent, Runtime, State, AppHandle, Manager, RunEvent, Runtime, State,
}; };
use tokio::sync::Mutex; use tokio::sync::Mutex;
use std::collections::HashMap; use std::collections::HashMap;
#[cfg(feature = "sqlite")] #[cfg(feature = "sqlite")]
use std::{fs::create_dir_all, path::PathBuf}; use std::{fs::create_dir_all, path::PathBuf};
#[cfg(feature = "sqlite")] #[cfg(feature = "sqlite")]
type Db = sqlx::sqlite::Sqlite; type Db = sqlx::sqlite::Sqlite;
#[cfg(feature = "mysql")] #[cfg(feature = "mysql")]
type Db = sqlx::mysql::MySql; type Db = sqlx::mysql::MySql;
#[cfg(feature = "postgres")] #[cfg(feature = "postgres")]
type Db = sqlx::postgres::Postgres; type Db = sqlx::postgres::Postgres;
#[cfg(feature = "sqlite")] #[cfg(feature = "sqlite")]
type LastInsertId = i64; type LastInsertId = i64;
#[cfg(not(feature = "sqlite"))] #[cfg(not(feature = "sqlite"))]
type LastInsertId = u64; type LastInsertId = u64;
#[derive(Debug, thiserror::Error)] #[derive(Debug, thiserror::Error)]
pub enum Error { pub enum Error {
#[error(transparent)] #[error(transparent)]
Sql(#[from] sqlx::Error), Sql(#[from] sqlx::Error),
#[error(transparent)] #[error(transparent)]
Migration(#[from] sqlx::migrate::MigrateError), Migration(#[from] sqlx::migrate::MigrateError),
#[error("database {0} not loaded")] #[error("database {0} not loaded")]
DatabaseNotLoaded(String), DatabaseNotLoaded(String),
#[error("unsupported datatype: {0}")] #[error("unsupported datatype: {0}")]
UnsupportedDatatype(String), UnsupportedDatatype(String),
} }
impl Serialize for Error { impl Serialize for Error {
fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
where where
S: Serializer, S: Serializer,
{ {
serializer.serialize_str(self.to_string().as_ref()) serializer.serialize_str(self.to_string().as_ref())
} }
} }
type Result<T> = std::result::Result<T, Error>; type Result<T> = std::result::Result<T, Error>;
#[cfg(feature = "sqlite")] #[cfg(feature = "sqlite")]
/// Resolves the App's **file path** from the `AppHandle` context /// Resolves the App's **file path** from the `AppHandle` context
/// object /// object
fn app_path<R: Runtime>(app: &AppHandle<R>) -> PathBuf { fn app_path<R: Runtime>(app: &AppHandle<R>) -> PathBuf {
#[allow(deprecated)] // FIXME: Change to non-deprecated function in Tauri v2 #[allow(deprecated)] // FIXME: Change to non-deprecated function in Tauri v2
app.path_resolver() app.path_resolver()
.app_dir() .app_dir()
.expect("No App path was found!") .expect("No App path was found!")
} }
#[cfg(feature = "sqlite")] #[cfg(feature = "sqlite")]
/// Maps the user supplied DB connection string to a connection string /// Maps the user supplied DB connection string to a connection string
/// with a fully qualified file path to the App's designed "app_path" /// with a fully qualified file path to the App's designed "app_path"
fn path_mapper(mut app_path: PathBuf, connection_string: &str) -> String { fn path_mapper(mut app_path: PathBuf, connection_string: &str) -> String {
app_path.push( app_path.push(
connection_string connection_string
.split_once(':') .split_once(':')
.expect("Couldn't parse the connection string for DB!") .expect("Couldn't parse the connection string for DB!")
.1, .1,
); );
format!( format!(
"sqlite:{}", "sqlite:{}",
app_path app_path
.to_str() .to_str()
.expect("Problem creating fully qualified path to Database file!") .expect("Problem creating fully qualified path to Database file!")
) )
} }
#[derive(Default)] #[derive(Default)]
struct DbInstances(Mutex<HashMap<String, Pool<Db>>>); struct DbInstances(Mutex<HashMap<String, Pool<Db>>>);
struct Migrations(Mutex<HashMap<String, MigrationList>>); struct Migrations(Mutex<HashMap<String, MigrationList>>);
#[derive(Default, Deserialize)] #[derive(Default, Deserialize)]
pub struct PluginConfig { pub struct PluginConfig {
#[serde(default)] #[serde(default)]
preload: Vec<String>, preload: Vec<String>,
} }
#[derive(Debug)] #[derive(Debug)]
pub enum MigrationKind { pub enum MigrationKind {
Up, Up,
Down, Down,
} }
impl From<MigrationKind> for MigrationType { impl From<MigrationKind> for MigrationType {
fn from(kind: MigrationKind) -> Self { fn from(kind: MigrationKind) -> Self {
match kind { match kind {
MigrationKind::Up => Self::ReversibleUp, MigrationKind::Up => Self::ReversibleUp,
MigrationKind::Down => Self::ReversibleDown, MigrationKind::Down => Self::ReversibleDown,
} }
} }
} }
/// A migration definition. /// A migration definition.
#[derive(Debug)] #[derive(Debug)]
pub struct Migration { pub struct Migration {
pub version: i64, pub version: i64,
pub description: &'static str, pub description: &'static str,
pub sql: &'static str, pub sql: &'static str,
pub kind: MigrationKind, pub kind: MigrationKind,
} }
#[derive(Debug)] #[derive(Debug)]
struct MigrationList(Vec<Migration>); struct MigrationList(Vec<Migration>);
impl MigrationSource<'static> for MigrationList { impl MigrationSource<'static> for MigrationList {
fn resolve(self) -> BoxFuture<'static, std::result::Result<Vec<SqlxMigration>, BoxDynError>> { fn resolve(self) -> BoxFuture<'static, std::result::Result<Vec<SqlxMigration>, BoxDynError>> {
Box::pin(async move { Box::pin(async move {
let mut migrations = Vec::new(); let mut migrations = Vec::new();
for migration in self.0 { for migration in self.0 {
if matches!(migration.kind, MigrationKind::Up) { if matches!(migration.kind, MigrationKind::Up) {
migrations.push(SqlxMigration::new( migrations.push(SqlxMigration::new(
migration.version, migration.version,
migration.description.into(), migration.description.into(),
migration.kind.into(), migration.kind.into(),
migration.sql.into(), migration.sql.into(),
)); false,
} ));
} }
Ok(migrations) }
}) Ok(migrations)
} })
} }
}
#[command]
async fn load<R: Runtime>( #[command]
#[allow(unused_variables)] app: AppHandle<R>, async fn load<R: Runtime>(
db_instances: State<'_, DbInstances>, #[allow(unused_variables)] app: AppHandle<R>,
migrations: State<'_, Migrations>, db_instances: State<'_, DbInstances>,
db: String, migrations: State<'_, Migrations>,
) -> Result<String> { db: String,
#[cfg(feature = "sqlite")] ) -> Result<String> {
let fqdb = path_mapper(app_path(&app), &db); #[cfg(feature = "sqlite")]
#[cfg(not(feature = "sqlite"))] let fqdb = path_mapper(app_path(&app), &db);
let fqdb = db.clone(); #[cfg(not(feature = "sqlite"))]
let fqdb = db.clone();
#[cfg(feature = "sqlite")]
create_dir_all(app_path(&app)).expect("Problem creating App directory!"); #[cfg(feature = "sqlite")]
create_dir_all(app_path(&app)).expect("Problem creating App directory!");
if !Db::database_exists(&fqdb).await.unwrap_or(false) {
Db::create_database(&fqdb).await?; if !Db::database_exists(&fqdb).await.unwrap_or(false) {
} Db::create_database(&fqdb).await?;
let pool = Pool::connect(&fqdb).await?; }
let pool = Pool::connect(&fqdb).await?;
if let Some(migrations) = migrations.0.lock().await.remove(&db) {
let migrator = Migrator::new(migrations).await?; if let Some(migrations) = migrations.0.lock().await.remove(&db) {
migrator.run(&pool).await?; let migrator = Migrator::new(migrations).await?;
} migrator.run(&pool).await?;
}
db_instances.0.lock().await.insert(db.clone(), pool);
Ok(db) db_instances.0.lock().await.insert(db.clone(), pool);
} Ok(db)
}
/// Allows the database connection(s) to be closed; if no database
/// name is passed in then _all_ database connection pools will be /// Allows the database connection(s) to be closed; if no database
/// shut down. /// name is passed in then _all_ database connection pools will be
#[command] /// shut down.
async fn close(db_instances: State<'_, DbInstances>, db: Option<String>) -> Result<bool> { #[command]
let mut instances = db_instances.0.lock().await; async fn close(db_instances: State<'_, DbInstances>, db: Option<String>) -> Result<bool> {
let mut instances = db_instances.0.lock().await;
let pools = if let Some(db) = db {
vec![db] let pools = if let Some(db) = db {
} else { vec![db]
instances.keys().cloned().collect() } else {
}; instances.keys().cloned().collect()
};
for pool in pools {
let db = instances for pool in pools {
.get_mut(&pool) // let db = instances
.ok_or(Error::DatabaseNotLoaded(pool))?; .get_mut(&pool) //
db.close().await; .ok_or(Error::DatabaseNotLoaded(pool))?;
} db.close().await;
}
Ok(true)
} Ok(true)
}
/// Execute a command against the database
#[command] /// Execute a command against the database
async fn execute( #[command]
db_instances: State<'_, DbInstances>, async fn execute(
db: String, db_instances: State<'_, DbInstances>,
query: String, db: String,
values: Vec<JsonValue>, query: String,
) -> Result<(u64, LastInsertId)> { values: Vec<JsonValue>,
let mut instances = db_instances.0.lock().await; ) -> Result<(u64, LastInsertId)> {
let mut instances = db_instances.0.lock().await;
let db = instances.get_mut(&db).ok_or(Error::DatabaseNotLoaded(db))?;
let mut query = sqlx::query(&query); let db = instances.get_mut(&db).ok_or(Error::DatabaseNotLoaded(db))?;
for value in values { let mut query = sqlx::query(&query);
if value.is_null() { for value in values {
query = query.bind(None::<JsonValue>); if value.is_null() {
} else if value.is_string() { query = query.bind(None::<JsonValue>);
query = query.bind(value.as_str().unwrap().to_owned()) } else if value.is_string() {
} else { query = query.bind(value.as_str().unwrap().to_owned())
query = query.bind(value); } else {
} query = query.bind(value);
} }
let result = query.execute(&*db).await?; }
#[cfg(feature = "sqlite")] let result = query.execute(&*db).await?;
let r = Ok((result.rows_affected(), result.last_insert_rowid())); #[cfg(feature = "sqlite")]
#[cfg(feature = "mysql")] let r = Ok((result.rows_affected(), result.last_insert_rowid()));
let r = Ok((result.rows_affected(), result.last_insert_id())); #[cfg(feature = "mysql")]
#[cfg(feature = "postgres")] let r = Ok((result.rows_affected(), result.last_insert_id()));
let r = Ok((result.rows_affected(), 0)); #[cfg(feature = "postgres")]
r let r = Ok((result.rows_affected(), 0));
} r
}
#[command]
async fn select( #[command]
db_instances: State<'_, DbInstances>, async fn select(
db: String, db_instances: State<'_, DbInstances>,
query: String, db: String,
values: Vec<JsonValue>, query: String,
) -> Result<Vec<HashMap<String, JsonValue>>> { values: Vec<JsonValue>,
let mut instances = db_instances.0.lock().await; ) -> Result<Vec<HashMap<String, JsonValue>>> {
let db = instances.get_mut(&db).ok_or(Error::DatabaseNotLoaded(db))?; let mut instances = db_instances.0.lock().await;
let mut query = sqlx::query(&query); let db = instances.get_mut(&db).ok_or(Error::DatabaseNotLoaded(db))?;
for value in values { let mut query = sqlx::query(&query);
if value.is_null() { for value in values {
query = query.bind(None::<JsonValue>); if value.is_null() {
} else if value.is_string() { query = query.bind(None::<JsonValue>);
query = query.bind(value.as_str().unwrap().to_owned()) } else if value.is_string() {
} else { query = query.bind(value.as_str().unwrap().to_owned())
query = query.bind(value); } else {
} query = query.bind(value);
} }
let rows = query.fetch_all(&*db).await?; }
let mut values = Vec::new(); let rows = query.fetch_all(&*db).await?;
for row in rows { let mut values = Vec::new();
let mut value = HashMap::default(); for row in rows {
for (i, column) in row.columns().iter().enumerate() { let mut value = HashMap::default();
let v = row.try_get_raw(i)?; for (i, column) in row.columns().iter().enumerate() {
let v = row.try_get_raw(i)?;
let v = crate::decode::to_json(v)?;
let v = crate::decode::to_json(v)?;
value.insert(column.name().to_string(), v);
} value.insert(column.name().to_string(), v);
}
values.push(value);
} values.push(value);
}
Ok(values)
} Ok(values)
}
/// Tauri SQL plugin builder.
#[derive(Default)] /// Tauri SQL plugin builder.
pub struct Builder { #[derive(Default)]
migrations: Option<HashMap<String, MigrationList>>, pub struct Builder {
} migrations: Option<HashMap<String, MigrationList>>,
}
impl Builder {
/// Add migrations to a database. impl Builder {
#[must_use] /// Add migrations to a database.
pub fn add_migrations(mut self, db_url: &str, migrations: Vec<Migration>) -> Self { #[must_use]
self.migrations pub fn add_migrations(mut self, db_url: &str, migrations: Vec<Migration>) -> Self {
.get_or_insert(Default::default()) self.migrations
.insert(db_url.to_string(), MigrationList(migrations)); .get_or_insert(Default::default())
self .insert(db_url.to_string(), MigrationList(migrations));
} self
}
pub fn build<R: Runtime>(mut self) -> TauriPlugin<R, Option<PluginConfig>> {
PluginBuilder::new("sql") pub fn build<R: Runtime>(mut self) -> TauriPlugin<R, Option<PluginConfig>> {
.invoke_handler(tauri::generate_handler![load, execute, select, close]) PluginBuilder::new("sql")
.setup_with_config(|app, config: Option<PluginConfig>| { .invoke_handler(tauri::generate_handler![load, execute, select, close])
let config = config.unwrap_or_default(); .setup_with_config(|app, config: Option<PluginConfig>| {
let config = config.unwrap_or_default();
#[cfg(feature = "sqlite")]
create_dir_all(app_path(app)).expect("problems creating App directory!"); #[cfg(feature = "sqlite")]
create_dir_all(app_path(app)).expect("problems creating App directory!");
tauri::async_runtime::block_on(async move {
let instances = DbInstances::default(); tauri::async_runtime::block_on(async move {
let mut lock = instances.0.lock().await; let instances = DbInstances::default();
for db in config.preload { let mut lock = instances.0.lock().await;
#[cfg(feature = "sqlite")] for db in config.preload {
let fqdb = path_mapper(app_path(app), &db); #[cfg(feature = "sqlite")]
#[cfg(not(feature = "sqlite"))] let fqdb = path_mapper(app_path(app), &db);
let fqdb = db.clone(); #[cfg(not(feature = "sqlite"))]
let fqdb = db.clone();
if !Db::database_exists(&fqdb).await.unwrap_or(false) {
Db::create_database(&fqdb).await?; if !Db::database_exists(&fqdb).await.unwrap_or(false) {
} Db::create_database(&fqdb).await?;
let pool = Pool::connect(&fqdb).await?; }
let pool = Pool::connect(&fqdb).await?;
if let Some(migrations) = self.migrations.as_mut().unwrap().remove(&db) {
let migrator = Migrator::new(migrations).await?; if let Some(migrations) = self.migrations.as_mut().unwrap().remove(&db) {
migrator.run(&pool).await?; let migrator = Migrator::new(migrations).await?;
} migrator.run(&pool).await?;
lock.insert(db, pool); }
} lock.insert(db, pool);
drop(lock); }
drop(lock);
app.manage(instances);
app.manage(Migrations(Mutex::new( app.manage(instances);
self.migrations.take().unwrap_or_default(), app.manage(Migrations(Mutex::new(
))); self.migrations.take().unwrap_or_default(),
)));
Ok(())
}) Ok(())
}) })
.on_event(|app, event| { })
if let RunEvent::Exit = event { .on_event(|app, event| {
tauri::async_runtime::block_on(async move { if let RunEvent::Exit = event {
let instances = &*app.state::<DbInstances>(); tauri::async_runtime::block_on(async move {
let instances = instances.0.lock().await; let instances = &*app.state::<DbInstances>();
for value in instances.values() { let instances = instances.0.lock().await;
value.close().await; for value in instances.values() {
} value.close().await;
}); }
} });
}) }
.build() })
} .build()
} }
}

@ -4,7 +4,7 @@ Simple, persistent key-value store.
## Install ## Install
_This plugin requires a Rust version of at least **1.64**_ _This plugin requires a Rust version of at least **1.67**_
There are three general methods of installation that we can recommend. There are three general methods of installation that we can recommend.

@ -8,8 +8,8 @@
"tauri": "tauri" "tauri": "tauri"
}, },
"devDependencies": { "devDependencies": {
"@tauri-apps/cli": "1.6.0", "@tauri-apps/cli": "1.6.3",
"vite": "^5.0.12", "typescript": "^5.8.2",
"typescript": "^5.3.3" "vite": "^6.2.5"
} }
} }

@ -25,7 +25,7 @@
"LICENSE" "LICENSE"
], ],
"devDependencies": { "devDependencies": {
"tslib": "2.6.3" "tslib": "2.8.1"
}, },
"dependencies": { "dependencies": {
"@tauri-apps/api": "1.6.0" "@tauri-apps/api": "1.6.0"

@ -4,7 +4,7 @@ Store secrets and keys using the [IOTA Stronghold](https://github.com/iotaledger
## Install ## Install
_This plugin requires a Rust version of at least **1.64**_ _This plugin requires a Rust version of at least **1.67**_
There are three general methods of installation that we can recommend. There are three general methods of installation that we can recommend.

@ -25,7 +25,7 @@
"LICENSE" "LICENSE"
], ],
"devDependencies": { "devDependencies": {
"tslib": "2.6.3" "tslib": "2.8.1"
}, },
"dependencies": { "dependencies": {
"@tauri-apps/api": "1.6.0" "@tauri-apps/api": "1.6.0"

@ -113,7 +113,7 @@ impl<'de> Deserialize<'de> for KeyType {
{ {
struct KeyTypeVisitor; struct KeyTypeVisitor;
impl<'de> Visitor<'de> for KeyTypeVisitor { impl Visitor<'_> for KeyTypeVisitor {
type Value = KeyType; type Value = KeyType;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {

@ -5,7 +5,7 @@ Download files from a remote HTTP server to disk.
## Install ## Install
_This plugin requires a Rust version of at least **1.64**_ _This plugin requires a Rust version of at least **1.67**_
There are three general methods of installation that we can recommend. There are three general methods of installation that we can recommend.

@ -25,7 +25,7 @@
"LICENSE" "LICENSE"
], ],
"devDependencies": { "devDependencies": {
"tslib": "2.6.3" "tslib": "2.8.1"
}, },
"dependencies": { "dependencies": {
"@tauri-apps/api": "1.6.0" "@tauri-apps/api": "1.6.0"

@ -19,7 +19,7 @@ http = "1"
rand = "0.8" rand = "0.8"
futures-util = "0.3" futures-util = "0.3"
tokio = { version = "1", features = ["net", "sync"] } tokio = { version = "1", features = ["net", "sync"] }
tokio-tungstenite = { version = "0.23", features = ["native-tls"] } tokio-tungstenite = { version = "0.27", features = ["native-tls"] }
hyper = { version = "1.4.1", features = ["client"] } hyper = { version = "1", features = ["client"] }
hyper-util = { version = "0.1.6", features = ["tokio", "http1"] } hyper-util = { version = "0.1", features = ["tokio", "http1"] }
base64 = "0.22.1" base64 = "0.22"

@ -4,7 +4,7 @@
## Install ## Install
_This plugin requires a Rust version of at least **1.64**_ _This plugin requires a Rust version of at least **1.67**_
There are three general methods of installation that we can recommend. There are three general methods of installation that we can recommend.

@ -9,9 +9,9 @@
"preview": "vite preview" "preview": "vite preview"
}, },
"devDependencies": { "devDependencies": {
"@tauri-apps/cli": "1.6.0", "@tauri-apps/cli": "1.6.3",
"typescript": "^5.3.3", "typescript": "^5.8.2",
"vite": "^5.0.12" "vite": "^6.2.5"
}, },
"dependencies": { "dependencies": {
"tauri-plugin-websocket-api": "link:..\\.." "tauri-plugin-websocket-api": "link:..\\.."

@ -11,7 +11,7 @@ tauri = { workspace = true }
tokio = { version = "1", features = ["net"] } tokio = { version = "1", features = ["net"] }
futures-util = "0.3" futures-util = "0.3"
tauri-plugin-websocket = { path = "../../../" } tauri-plugin-websocket = { path = "../../../" }
tokio-tungstenite = "0.23" tokio-tungstenite = "0.27"
[build-dependencies] [build-dependencies]
tauri-build = { workspace = true } tauri-build = { workspace = true }

@ -10,8 +10,9 @@ body {
margin: 0; margin: 0;
padding: 8px; padding: 8px;
box-sizing: border-box; box-sizing: border-box;
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, font-family:
Oxygen-Sans, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Oxygen-Sans, Ubuntu,
Cantarell, "Helvetica Neue", sans-serif;
} }
a { a {

@ -1,11 +1,37 @@
import { invoke, transformCallback } from "@tauri-apps/api/tauri"; import { invoke, transformCallback } from "@tauri-apps/api/tauri";
export interface ConnectionConfig { export interface ConnectionConfig {
/**
* Read buffer capacity. The default value is 128 KiB.
*/
readBufferSize?: number;
/** The target minimum size of the write buffer to reach before writing the data to the underlying stream. The default value is 128 KiB.
*
* If set to 0 each message will be eagerly written to the underlying stream. It is often more optimal to allow them to buffer a little, hence the default value.
*/
writeBufferSize?: number; writeBufferSize?: number;
/** The max size of the write buffer in bytes. Setting this can provide backpressure in the case the write buffer is filling up due to write errors. The default value is unlimited.
*
* Note: The write buffer only builds up past write_buffer_size when writes to the underlying stream are failing. So the write buffer can not fill up if you are not observing write errors.
*
* Note: Should always be at least write_buffer_size + 1 message and probably a little more depending on error handling strategy.
*/
maxWriteBufferSize?: number; maxWriteBufferSize?: number;
maxMessageSize?: number; /**
maxFrameSize?: number; * The maximum size of an incoming message. The string "none" means no size limit. The default value is 64 MiB which should be reasonably big for all normal use-cases but small enough to prevent memory eating by a malicious user.
*/
maxMessageSize?: number | "none";
/**
* The maximum size of a single incoming message frame. The string "none" means no size limit. The limit is for frame payload NOT including the frame header. The default value is 16 MiB which should be reasonably big for all normal use-cases but small enough to prevent memory eating by a malicious user.
*/
maxFrameSize?: number | "none";
/**
* When set to true, the server will accept and handle unmasked frames from the client. According to the RFC 6455, the server must close the connection to the client in such cases, however it seems like there are some popular libraries that are sending unmasked frames, ignoring the RFC. By default this option is set to false, i.e. according to RFC 6455.
*/
acceptUnmaskedFrames?: boolean; acceptUnmaskedFrames?: boolean;
/**
* Additional connect request headers.
*/
headers?: HeadersInit; headers?: HeadersInit;
} }

@ -24,7 +24,7 @@
"LICENSE" "LICENSE"
], ],
"devDependencies": { "devDependencies": {
"tslib": "2.6.3" "tslib": "2.8.1"
}, },
"dependencies": { "dependencies": {
"@tauri-apps/api": "1.6.0" "@tauri-apps/api": "1.6.0"

@ -87,13 +87,21 @@ pub struct ProxyConfiguration {
pub auth: Option<ProxyAuth>, pub auth: Option<ProxyAuth>,
} }
#[derive(Deserialize)]
#[serde(untagged, rename_all = "camelCase")]
enum Max {
None,
Number(usize),
}
#[derive(Deserialize)] #[derive(Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct ConnectionConfig { pub(crate) struct ConnectionConfig {
pub read_buffer_size: Option<usize>,
pub write_buffer_size: Option<usize>, pub write_buffer_size: Option<usize>,
pub max_write_buffer_size: Option<usize>, pub max_write_buffer_size: Option<usize>,
pub max_message_size: Option<usize>, pub max_message_size: Option<Max>,
pub max_frame_size: Option<usize>, pub max_frame_size: Option<Max>,
#[serde(default)] #[serde(default)]
pub accept_unmasked_frames: bool, pub accept_unmasked_frames: bool,
pub headers: Option<Vec<(String, String)>>, pub headers: Option<Vec<(String, String)>>,
@ -101,18 +109,38 @@ pub struct ConnectionConfig {
impl From<ConnectionConfig> for WebSocketConfig { impl From<ConnectionConfig> for WebSocketConfig {
fn from(config: ConnectionConfig) -> Self { fn from(config: ConnectionConfig) -> Self {
// Disabling the warning on max_send_queue which we don't use anymore since it was deprecated. let mut builder =
#[allow(deprecated)] WebSocketConfig::default().accept_unmasked_frames(config.accept_unmasked_frames);
Self {
max_send_queue: None, if let Some(read_buffer_size) = config.read_buffer_size {
write_buffer_size: config.write_buffer_size.unwrap_or(128 * 1024), builder = builder.read_buffer_size(read_buffer_size)
max_write_buffer_size: config.max_write_buffer_size.unwrap_or(usize::MAX), }
// This may be harmful since if it's not provided from js we're overwriting the default value with None, meaning no size limit.
max_message_size: config.max_message_size, if let Some(write_buffer_size) = config.write_buffer_size {
// This may be harmful since if it's not provided from js we're overwriting the default value with None, meaning no size limit. builder = builder.write_buffer_size(write_buffer_size)
max_frame_size: config.max_frame_size,
accept_unmasked_frames: config.accept_unmasked_frames,
} }
if let Some(max_write_buffer_size) = config.max_write_buffer_size {
builder = builder.max_write_buffer_size(max_write_buffer_size)
}
if let Some(max_message_size) = config.max_message_size {
let max_size = match max_message_size {
Max::None => Option::None,
Max::Number(n) => Some(n),
};
builder = builder.max_message_size(max_size);
}
if let Some(max_frame_size) = config.max_frame_size {
let max_size = match max_frame_size {
Max::None => Option::None,
Max::Number(n) => Some(n),
};
builder = builder.max_frame_size(max_size);
}
builder
} }
} }
@ -191,21 +219,21 @@ async fn connect<R: Runtime>(
let response = match message { let response = match message {
Ok(Message::Text(t)) => { Ok(Message::Text(t)) => {
serde_json::to_value(WebSocketMessage::Text(t)).unwrap() serde_json::to_value(WebSocketMessage::Text(t.to_string())).unwrap()
} }
Ok(Message::Binary(t)) => { Ok(Message::Binary(t)) => {
serde_json::to_value(WebSocketMessage::Binary(t)).unwrap() serde_json::to_value(WebSocketMessage::Binary(t.to_vec())).unwrap()
} }
Ok(Message::Ping(t)) => { Ok(Message::Ping(t)) => {
serde_json::to_value(WebSocketMessage::Ping(t)).unwrap() serde_json::to_value(WebSocketMessage::Ping(t.to_vec())).unwrap()
} }
Ok(Message::Pong(t)) => { Ok(Message::Pong(t)) => {
serde_json::to_value(WebSocketMessage::Pong(t)).unwrap() serde_json::to_value(WebSocketMessage::Pong(t.to_vec())).unwrap()
} }
Ok(Message::Close(t)) => { Ok(Message::Close(t)) => {
serde_json::to_value(WebSocketMessage::Close(t.map(|v| CloseFrame { serde_json::to_value(WebSocketMessage::Close(t.map(|v| CloseFrame {
code: v.code.into(), code: v.code.into(),
reason: v.reason.into_owned(), reason: v.reason.to_string(),
}))) })))
.unwrap() .unwrap()
} }
@ -296,13 +324,13 @@ async fn send(
if let Some(write) = manager.0.lock().await.get_mut(&id) { if let Some(write) = manager.0.lock().await.get_mut(&id) {
write write
.send(match message { .send(match message {
WebSocketMessage::Text(t) => Message::Text(t), WebSocketMessage::Text(t) => Message::Text(t.into()),
WebSocketMessage::Binary(t) => Message::Binary(t), WebSocketMessage::Binary(t) => Message::Binary(t.into()),
WebSocketMessage::Ping(t) => Message::Ping(t), WebSocketMessage::Ping(t) => Message::Ping(t.into()),
WebSocketMessage::Pong(t) => Message::Pong(t), WebSocketMessage::Pong(t) => Message::Pong(t.into()),
WebSocketMessage::Close(t) => Message::Close(t.map(|v| ProtocolCloseFrame { WebSocketMessage::Close(t) => Message::Close(t.map(|v| ProtocolCloseFrame {
code: v.code.into(), code: v.code.into(),
reason: std::borrow::Cow::Owned(v.reason), reason: v.reason.into(),
})), })),
}) })
.await?; .await?;

@ -4,7 +4,7 @@ Save window positions and sizes and restore them when the app is reopened.
## Install ## Install
_This plugin requires a Rust version of at least **1.64**_ _This plugin requires a Rust version of at least **1.67**_
There are three general methods of installation that we can recommend. There are three general methods of installation that we can recommend.

@ -25,7 +25,7 @@
"LICENSE" "LICENSE"
], ],
"devDependencies": { "devDependencies": {
"tslib": "2.6.3" "tslib": "2.8.1"
}, },
"dependencies": { "dependencies": {
"@tauri-apps/api": "1.6.0" "@tauri-apps/api": "1.6.0"

File diff suppressed because it is too large Load Diff

@ -1,3 +1,5 @@
packages: packages:
- plugins/* - plugins/*
- plugins/*/examples/* - plugins/*/examples/*
onlyBuiltDependencies:
- esbuild

@ -4,7 +4,7 @@
## Install ## Install
_This plugin requires a Rust version of at least **1.64**_ _This plugin requires a Rust version of at least **1.67**_
There are three general methods of installation that we can recommend. There are three general methods of installation that we can recommend.

Loading…
Cancel
Save