Merge remote-tracking branch 'origin/dev' into sql/absolute-paths

pull/279/head
FabianLars 2 years ago
commit f9ab350412
No known key found for this signature in database
GPG Key ID: 3B12BC1DEBF61125

@ -0,0 +1,53 @@
# Builds the whole plugin workspace with the minimum supported Rust version
# (pinned toolchain 1.64.0 below) so MSRV regressions are caught in CI.
name: Check MSRV

on:
  push:
    branches:
      - dev
    # Only run when something that can affect the build changes.
    paths:
      - '.github/workflows/msrv-check.yml'
      - 'plugins/*/src/**'
      - '**/Cargo.toml'
      - '**/Cargo.lock'
  pull_request:
    branches:
      - dev
    paths:
      - '.github/workflows/msrv-check.yml'
      - 'plugins/*/src/**'
      - '**/Cargo.toml'
      - '**/Cargo.lock'

# Cancel an in-flight run for the same ref when a newer one starts.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  msrv:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
    steps:
      - uses: actions/checkout@v3
      - name: install webkit2gtk and libudev for [authenticator]
        run: |
          sudo apt-get update
          sudo apt-get install -y webkit2gtk-4.0 libudev-dev
      # Pinned toolchain == the workspace MSRV.
      - uses: dtolnay/rust-toolchain@1.64.0
      - uses: Swatinem/rust-cache@v2
      # The sql plugin's backend features are mutually exclusive, so it is
      # excluded from the --all-features build and built once per backend.
      - name: build
        run: cargo build --workspace --exclude 'tauri-plugin-sql' --all-targets --all-features
      - name: build sql:sqlite
        run: cargo build --package 'tauri-plugin-sql' --all-targets --features sqlite
      - name: build sql:mysql
        run: cargo build --package 'tauri-plugin-sql' --all-targets --features mysql
      - name: build sql:postgres
        run: cargo build --package 'tauri-plugin-sql' --all-targets --features postgres

88
Cargo.lock generated

@ -153,7 +153,7 @@ dependencies = [
"slab",
"socket2",
"waker-fn",
"windows-sys",
"windows-sys 0.42.0",
]
[[package]]
@ -1156,7 +1156,7 @@ dependencies = [
"cfg-if",
"libc",
"redox_syscall",
"windows-sys",
"windows-sys 0.42.0",
]
[[package]]
@ -1986,6 +1986,15 @@ dependencies = [
"zeroize",
]
[[package]]
name = "iota-crypto"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d1d447f89ea13f2cd26d50195378bb2f76a0f9320ae4db3c0811b25fe6ed6c1"
dependencies = [
"autocfg",
]
[[package]]
name = "iota_stronghold"
version = "1.0.5"
@ -1994,7 +2003,7 @@ checksum = "6c5baaa2460627283f54b968db7a38c9c754dc6059157cae64550ed1b79c91aa"
dependencies = [
"bincode",
"hkdf",
"iota-crypto",
"iota-crypto 0.15.3",
"rust-argon2",
"serde",
"stronghold-derive",
@ -2384,7 +2393,7 @@ dependencies = [
"libc",
"log",
"wasi 0.11.0+wasi-snapshot-preview1",
"windows-sys",
"windows-sys 0.42.0",
]
[[package]]
@ -2506,7 +2515,7 @@ dependencies = [
"mio",
"serde",
"walkdir",
"windows-sys",
"windows-sys 0.42.0",
]
[[package]]
@ -2631,6 +2640,15 @@ dependencies = [
"syn",
]
[[package]]
name = "num_threads"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44"
dependencies = [
"libc",
]
[[package]]
name = "objc"
version = "0.2.7"
@ -2808,7 +2826,7 @@ dependencies = [
"libc",
"redox_syscall",
"smallvec",
"windows-sys",
"windows-sys 0.42.0",
]
[[package]]
@ -3020,7 +3038,7 @@ dependencies = [
"line-wrap",
"quick-xml",
"serde",
"time 0.3.17",
"time 0.3.20",
]
[[package]]
@ -3046,7 +3064,7 @@ dependencies = [
"libc",
"log",
"wepoll-ffi",
"windows-sys",
"windows-sys 0.42.0",
]
[[package]]
@ -3489,7 +3507,7 @@ version = "0.1.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "713cfb06c7059f3588fb8044c0fad1d09e3c01d225e25b9220dbfdcf16dbb1b3"
dependencies = [
"windows-sys",
"windows-sys 0.42.0",
]
[[package]]
@ -3901,6 +3919,7 @@ dependencies = [
"sqlx-rt",
"stringprep",
"thiserror",
"time 0.3.20",
"tokio-stream",
"url",
"webpki-roots",
@ -4013,7 +4032,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d93abb10fbd11335d31c33a70b2523c0caab348215caa2ce6da04a268c30afcb"
dependencies = [
"dirs",
"iota-crypto",
"iota-crypto 0.15.3",
"libc",
"libsodium-sys",
"log",
@ -4044,7 +4063,7 @@ dependencies = [
"anyhow",
"dirs-next",
"hex",
"iota-crypto",
"iota-crypto 0.15.3",
"once_cell",
"paste",
"serde",
@ -4238,7 +4257,7 @@ dependencies = [
"sha2 0.10.6",
"tauri-utils",
"thiserror",
"time 0.3.17",
"time 0.3.20",
"uuid 1.3.0",
"walkdir",
]
@ -4336,7 +4355,7 @@ dependencies = [
"serde_json",
"serde_repr",
"tauri",
"time 0.3.17",
"time 0.3.20",
]
[[package]]
@ -4372,7 +4391,7 @@ dependencies = [
"serde_json",
"tauri",
"thiserror",
"windows-sys",
"windows-sys 0.45.0",
"zbus",
]
@ -4380,13 +4399,14 @@ dependencies = [
name = "tauri-plugin-sql"
version = "0.1.0"
dependencies = [
"futures",
"futures-core",
"log",
"serde",
"serde_json",
"sqlx",
"tauri",
"thiserror",
"time 0.3.20",
"tokio",
]
@ -4406,7 +4426,7 @@ name = "tauri-plugin-stronghold"
version = "0.1.0"
dependencies = [
"hex",
"iota-crypto",
"iota-crypto 0.16.0",
"iota_stronghold",
"log",
"rand 0.8.5",
@ -4612,11 +4632,13 @@ dependencies = [
[[package]]
name = "time"
version = "0.3.17"
version = "0.3.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a561bf4617eebd33bca6434b988f39ed798e527f51a1e797d0ee4f61c0a38376"
checksum = "cd0cbfecb4d19b5ea75bb31ad904eb5b9fa13f21079c3b92017ebdf4999a5890"
dependencies = [
"itoa 1.0.5",
"libc",
"num_threads",
"serde",
"time-core",
"time-macros",
@ -4630,9 +4652,9 @@ checksum = "2e153e1f1acaef8acc537e68b44906d2db6436e2b35ac2c6b42640fff91f00fd"
[[package]]
name = "time-macros"
version = "0.2.6"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d967f99f534ca7e495c575c62638eebc2898a8c84c119b89e250477bc4ba16b2"
checksum = "fd80a657e71da814b8e5d60d3374fc6d35045062245d80224748ae522dd76f36"
dependencies = [
"time-core",
]
@ -4678,7 +4700,7 @@ dependencies = [
"num_cpus",
"pin-project-lite",
"socket2",
"windows-sys",
"windows-sys 0.42.0",
]
[[package]]
@ -5414,6 +5436,30 @@ dependencies = [
"windows_x86_64_msvc 0.42.1",
]
[[package]]
name = "windows-sys"
version = "0.45.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0"
dependencies = [
"windows-targets",
]
[[package]]
name = "windows-targets"
version = "0.42.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e2522491fbfcd58cc84d47aeb2958948c4b8982e9a2d8a2a35bbaed431390e7"
dependencies = [
"windows_aarch64_gnullvm",
"windows_aarch64_msvc 0.42.1",
"windows_i686_gnu 0.42.1",
"windows_i686_msvc 0.42.1",
"windows_x86_64_gnu 0.42.1",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc 0.42.1",
]
[[package]]
name = "windows-tokens"
version = "0.39.0"

@ -17,7 +17,7 @@ serde_repr = "0.1"
byte-unit = "4.0"
fern = "0.6"
log = { workspace = true, features = ["kv_unstable"] }
time = { version = "0.3", features = ["formatting"] }
time = { version = "0.3", features = ["formatting", "local-offset"] }
[features]
colored = ["fern/colored"]

@ -21,9 +21,11 @@ use tauri::{
};
pub use fern;
use time::OffsetDateTime;
const DEFAULT_MAX_FILE_SIZE: u128 = 40000;
const DEFAULT_ROTATION_STRATEGY: RotationStrategy = RotationStrategy::KeepOne;
const DEFAULT_TIMEZONE_STRATEGY: TimezoneStrategy = TimezoneStrategy::UseUtc;
const DEFAULT_LOG_TARGETS: [LogTarget; 2] = [LogTarget::Stdout, LogTarget::LogDir];
/// An enum representing the available verbosity levels of the logger.
@ -83,6 +85,23 @@ pub enum RotationStrategy {
KeepOne,
}
/// Controls which timezone the logger uses when formatting timestamps.
#[derive(Debug, Clone)]
pub enum TimezoneStrategy {
    /// Always format timestamps in UTC.
    UseUtc,
    /// Format timestamps in the system's local timezone, falling back to UTC
    /// when the local offset cannot be determined.
    UseLocal,
}

impl TimezoneStrategy {
    /// Returns the current time according to this strategy.
    pub fn get_now(&self) -> OffsetDateTime {
        match self {
            TimezoneStrategy::UseUtc => OffsetDateTime::now_utc(),
            TimezoneStrategy::UseLocal => {
                // `now_local()` errs when the platform cannot safely
                // determine the local UTC offset (e.g. multi-threaded
                // processes on Unix).
                OffsetDateTime::now_local().unwrap_or_else(|_| OffsetDateTime::now_utc())
            } // Fallback to UTC since Rust cannot determine local timezone
        }
    }
}
#[derive(Debug, Serialize, Clone)]
struct RecordPayload {
message: String,
@ -145,6 +164,7 @@ fn log(
pub struct Builder {
dispatch: fern::Dispatch,
rotation_strategy: RotationStrategy,
timezone_strategy: TimezoneStrategy,
max_file_size: u128,
targets: Vec<LogTarget>,
}
@ -157,7 +177,7 @@ impl Default for Builder {
let dispatch = fern::Dispatch::new().format(move |out, message, record| {
out.finish(format_args!(
"{}[{}][{}] {}",
time::OffsetDateTime::now_utc().format(&format).unwrap(),
DEFAULT_TIMEZONE_STRATEGY.get_now().format(&format).unwrap(),
record.target(),
record.level(),
message
@ -166,6 +186,7 @@ impl Default for Builder {
Self {
dispatch,
rotation_strategy: DEFAULT_ROTATION_STRATEGY,
timezone_strategy: DEFAULT_TIMEZONE_STRATEGY,
max_file_size: DEFAULT_MAX_FILE_SIZE,
targets: DEFAULT_LOG_TARGETS.into(),
}
@ -182,6 +203,24 @@ impl Builder {
self
}
/// Sets the timezone used for log-line timestamps (UTC by default).
///
/// NOTE(review): this builds a brand-new `fern::Dispatch` and assigns it to
/// `self.dispatch`, which appears to discard any formatting configured on the
/// builder before this call — confirm that callers invoke this before other
/// dispatch customizations.
pub fn timezone_strategy(mut self, timezone_strategy: TimezoneStrategy) -> Self {
    // Keep a copy for file-rotation naming etc.; the original is moved
    // into the format closure below.
    self.timezone_strategy = timezone_strategy.clone();
    let format =
        time::format_description::parse("[[[year]-[month]-[day]][[[hour]:[minute]:[second]]")
            .unwrap();
    self.dispatch = fern::Dispatch::new().format(move |out, message, record| {
        out.finish(format_args!(
            "{}[{}][{}] {}",
            timezone_strategy.get_now().format(&format).unwrap(),
            record.target(),
            record.level(),
            message
        ))
    });
    self
}
pub fn max_file_size(mut self, max_file_size: u128) -> Self {
self.max_file_size = max_file_size;
self
@ -228,10 +267,12 @@ impl Builder {
let format =
time::format_description::parse("[[[year]-[month]-[day]][[[hour]:[minute]:[second]]")
.unwrap();
let timezone_strategy = self.timezone_strategy.clone();
self.format(move |out, message, record| {
out.finish(format_args!(
"{}[{}][{}] {}",
time::OffsetDateTime::now_utc().format(&format).unwrap(),
timezone_strategy.get_now().format(&format).unwrap(),
record.target(),
colors.color(record.level()),
message
@ -259,6 +300,7 @@ impl Builder {
&path,
app_name,
&self.rotation_strategy,
&self.timezone_strategy,
self.max_file_size,
)?)?
.into()
@ -273,6 +315,7 @@ impl Builder {
&path,
app_name,
&self.rotation_strategy,
&self.timezone_strategy,
self.max_file_size,
)?)?
.into()
@ -306,6 +349,7 @@ fn get_log_file_path(
dir: &impl AsRef<Path>,
app_name: &str,
rotation_strategy: &RotationStrategy,
timezone_strategy: &TimezoneStrategy,
max_file_size: u128,
) -> plugin::Result<PathBuf> {
let path = dir.as_ref().join(format!("{app_name}.log"));
@ -318,7 +362,8 @@ fn get_log_file_path(
let to = dir.as_ref().join(format!(
"{}_{}.log",
app_name,
time::OffsetDateTime::now_utc()
timezone_strategy
.get_now()
.format(
&time::format_description::parse(
"[year]-[month]-[day]_[hour]-[minute]-[second]"

@ -18,7 +18,7 @@ log.workspace = true
thiserror.workspace = true
[target.'cfg(target_os = "windows")'.dependencies.windows-sys]
version = "0.42"
version = "0.45"
features = [
"Win32_System_Threading",
"Win32_System_DataExchange",

@ -1,4 +1,4 @@
![tauri-plugin-single-instance](banner.jpg)
![tauri-plugin-single-instance](banner.png)
Ensure a single instance of your tauri app is running.
@ -38,7 +38,7 @@ struct Payload {
fn main() {
tauri::Builder::default()
.plugin(auri_plugin_single_instance::init(|app, argv, cwd| {
.plugin(tauri_plugin_single_instance::init(|app, argv, cwd| {
println!("{}, {argv:?}, {cwd}", app.package_info().name);
app.emit_all("single-instance", Payload { args: argv, cwd }).unwrap();

@ -15,9 +15,10 @@ serde_json.workspace = true
tauri.workspace = true
log.workspace = true
thiserror.workspace = true
sqlx = { version = "0.6", features = ["runtime-tokio-rustls", "json"] }
futures-core = "0.3"
sqlx = { version = "0.6", features = ["runtime-tokio-rustls", "json", "time"] }
time = "0.3"
tokio = { version = "1", features = ["sync"] }
futures = "0.3"
[features]
sqlite = ["sqlx/sqlite"]

@ -0,0 +1,15 @@
// Backend-specific decoders that turn a raw driver value into a
// `serde_json::Value`. Exactly one database feature is expected to be
// enabled; the matching `to_json` is re-exported for use by the plugin.
#[cfg(feature = "mysql")]
mod mysql;
#[cfg(feature = "postgres")]
mod postgres;
#[cfg(feature = "sqlite")]
mod sqlite;

#[cfg(feature = "mysql")]
pub(crate) use mysql::to_json;
#[cfg(feature = "postgres")]
pub(crate) use postgres::to_json;
#[cfg(feature = "sqlite")]
pub(crate) use sqlite::to_json;

@ -0,0 +1,90 @@
use serde_json::Value as JsonValue;
use sqlx::{mysql::MySqlValueRef, TypeInfo, Value, ValueRef};
use time::{Date, OffsetDateTime, PrimitiveDateTime, Time};
use crate::Error;
pub(crate) fn to_json(v: MySqlValueRef) -> Result<JsonValue, Error> {
if v.is_null() {
return Ok(JsonValue::Null);
}
let res = match v.type_info().name() {
"CHAR" | "VARCHAR" | "TINYTEXT" | "TEXT" | "MEDIUMTEXT" | "LONGTEXT" | "ENUM" => {
if let Ok(v) = ValueRef::to_owned(&v).try_decode() {
JsonValue::String(v)
} else {
JsonValue::Null
}
}
"FLOAT" | "DOUBLE" => {
if let Ok(v) = ValueRef::to_owned(&v).try_decode::<f64>() {
JsonValue::from(v)
} else {
JsonValue::Null
}
}
"TINYINT" | "SMALLINT" | "INT" | "MEDIUMINT" | "BIGINT" => {
if let Ok(v) = ValueRef::to_owned(&v).try_decode::<i64>() {
JsonValue::Number(v.into())
} else {
JsonValue::Null
}
}
"TINYINT UNSIGNED" | "SMALLINT UNSIGNED" | "INT UNSIGNED" | "MEDIUMINT UNSIGNED"
| "BIGINT UNSIGNED" | "YEAR" => {
if let Ok(v) = ValueRef::to_owned(&v).try_decode::<u64>() {
JsonValue::Number(v.into())
} else {
JsonValue::Null
}
}
"BOOLEAN" => {
if let Ok(v) = ValueRef::to_owned(&v).try_decode() {
JsonValue::Bool(v)
} else {
JsonValue::Null
}
}
"DATE" => {
if let Ok(v) = ValueRef::to_owned(&v).try_decode::<Date>() {
JsonValue::String(v.to_string())
} else {
JsonValue::Null
}
}
"TIME" => {
if let Ok(v) = ValueRef::to_owned(&v).try_decode::<Time>() {
JsonValue::String(v.to_string())
} else {
JsonValue::Null
}
}
"DATETIME" => {
if let Ok(v) = ValueRef::to_owned(&v).try_decode::<PrimitiveDateTime>() {
JsonValue::String(v.to_string())
} else {
JsonValue::Null
}
}
"TIMESTAMP" => {
if let Ok(v) = ValueRef::to_owned(&v).try_decode::<OffsetDateTime>() {
JsonValue::String(v.to_string())
} else {
JsonValue::Null
}
}
"JSON" => ValueRef::to_owned(&v).try_decode().unwrap_or_default(),
"TINIYBLOB" | "MEDIUMBLOB" | "BLOB" | "LONGBLOB" => {
if let Ok(v) = ValueRef::to_owned(&v).try_decode::<Vec<u8>>() {
JsonValue::Array(v.into_iter().map(|n| JsonValue::Number(n.into())).collect())
} else {
JsonValue::Null
}
}
"NULL" => JsonValue::Null,
_ => return Err(Error::UnsupportedDatatype(v.type_info().name().to_string())),
};
Ok(res)
}

@ -0,0 +1,82 @@
use serde_json::Value as JsonValue;
use sqlx::{postgres::PgValueRef, TypeInfo, Value, ValueRef};
use time::{Date, OffsetDateTime, PrimitiveDateTime, Time};
use crate::Error;
/// Decodes a raw Postgres value into a [`serde_json::Value`].
///
/// Text types map to JSON strings, numeric types to JSON numbers, temporal
/// types to their `to_string()` rendering, and `BYTEA` to an array of byte
/// values. A value that fails to decode maps to `JsonValue::Null`.
///
/// # Errors
/// Returns [`Error::UnsupportedDatatype`] for a column type not listed below.
pub(crate) fn to_json(v: PgValueRef) -> Result<JsonValue, Error> {
    if v.is_null() {
        return Ok(JsonValue::Null);
    }

    let res = match v.type_info().name() {
        "CHAR" | "VARCHAR" | "TEXT" | "NAME" => ValueRef::to_owned(&v)
            .try_decode::<String>()
            .map(JsonValue::String)
            .unwrap_or(JsonValue::Null),
        "FLOAT4" | "FLOAT8" => ValueRef::to_owned(&v)
            .try_decode::<f64>()
            .map(JsonValue::from)
            .unwrap_or(JsonValue::Null),
        "INT2" | "INT4" | "INT8" => ValueRef::to_owned(&v)
            .try_decode::<i64>()
            .map(|n| JsonValue::Number(n.into()))
            .unwrap_or(JsonValue::Null),
        "BOOL" => ValueRef::to_owned(&v)
            .try_decode::<bool>()
            .map(JsonValue::Bool)
            .unwrap_or(JsonValue::Null),
        "DATE" => ValueRef::to_owned(&v)
            .try_decode::<Date>()
            .map(|d| JsonValue::String(d.to_string()))
            .unwrap_or(JsonValue::Null),
        "TIME" => ValueRef::to_owned(&v)
            .try_decode::<Time>()
            .map(|t| JsonValue::String(t.to_string()))
            .unwrap_or(JsonValue::Null),
        // TIMESTAMP has no offset; TIMESTAMPTZ is offset-aware.
        "TIMESTAMP" => ValueRef::to_owned(&v)
            .try_decode::<PrimitiveDateTime>()
            .map(|dt| JsonValue::String(dt.to_string()))
            .unwrap_or(JsonValue::Null),
        "TIMESTAMPTZ" => ValueRef::to_owned(&v)
            .try_decode::<OffsetDateTime>()
            .map(|dt| JsonValue::String(dt.to_string()))
            .unwrap_or(JsonValue::Null),
        "JSON" | "JSONB" => ValueRef::to_owned(&v).try_decode().unwrap_or_default(),
        "BYTEA" => ValueRef::to_owned(&v)
            .try_decode::<Vec<u8>>()
            .map(|bytes| {
                JsonValue::Array(bytes.into_iter().map(|b| JsonValue::Number(b.into())).collect())
            })
            .unwrap_or(JsonValue::Null),
        "VOID" => JsonValue::Null,
        _ => return Err(Error::UnsupportedDatatype(v.type_info().name().to_string())),
    };

    Ok(res)
}

@ -0,0 +1,74 @@
use serde_json::Value as JsonValue;
use sqlx::{sqlite::SqliteValueRef, TypeInfo, Value, ValueRef};
use time::{Date, PrimitiveDateTime, Time};
use crate::Error;
/// Decodes a raw SQLite value into a [`serde_json::Value`].
///
/// TEXT maps to a JSON string, REAL/INTEGER to JSON numbers, temporal types
/// to their `to_string()` rendering, and BLOB to an array of byte values.
/// A value that fails to decode maps to `JsonValue::Null` rather than an
/// error.
///
/// # Errors
/// Returns [`Error::UnsupportedDatatype`] for a column type not listed below.
pub(crate) fn to_json(v: SqliteValueRef) -> Result<JsonValue, Error> {
    if v.is_null() {
        return Ok(JsonValue::Null);
    }

    let res = match v.type_info().name() {
        "TEXT" => {
            if let Ok(v) = v.to_owned().try_decode() {
                JsonValue::String(v)
            } else {
                JsonValue::Null
            }
        }
        "REAL" => {
            if let Ok(v) = v.to_owned().try_decode::<f64>() {
                JsonValue::from(v)
            } else {
                JsonValue::Null
            }
        }
        "INTEGER" | "NUMERIC" => {
            if let Ok(v) = v.to_owned().try_decode::<i64>() {
                JsonValue::Number(v.into())
            } else {
                JsonValue::Null
            }
        }
        "BOOLEAN" => {
            if let Ok(v) = v.to_owned().try_decode() {
                JsonValue::Bool(v)
            } else {
                JsonValue::Null
            }
        }
        "DATE" => {
            if let Ok(v) = v.to_owned().try_decode::<Date>() {
                JsonValue::String(v.to_string())
            } else {
                JsonValue::Null
            }
        }
        "TIME" => {
            if let Ok(v) = v.to_owned().try_decode::<Time>() {
                JsonValue::String(v.to_string())
            } else {
                JsonValue::Null
            }
        }
        "DATETIME" => {
            if let Ok(v) = v.to_owned().try_decode::<PrimitiveDateTime>() {
                JsonValue::String(v.to_string())
            } else {
                JsonValue::Null
            }
        }
        // Each byte becomes a JSON number so the frontend receives a plain array.
        "BLOB" => {
            if let Ok(v) = v.to_owned().try_decode::<Vec<u8>>() {
                JsonValue::Array(v.into_iter().map(|n| JsonValue::Number(n.into())).collect())
            } else {
                JsonValue::Null
            }
        }
        "NULL" => JsonValue::Null,
        _ => return Err(Error::UnsupportedDatatype(v.type_info().name().to_string())),
    };

    Ok(res)
}

@ -14,15 +14,6 @@ compile_error!(
"Database driver not defined. Please set the feature flag for the driver of your choice."
);
#[cfg(any(
all(feature = "sqlite", not(any(feature = "mysql", feature = "postgres"))),
all(feature = "mysql", not(any(feature = "sqlite", feature = "postgres"))),
all(feature = "postgres", not(any(feature = "sqlite", feature = "mysql"))),
))]
mod decode;
mod plugin;
#[cfg(any(
all(feature = "sqlite", not(any(feature = "mysql", feature = "postgres"))),
all(feature = "mysql", not(any(feature = "sqlite", feature = "postgres"))),
all(feature = "postgres", not(any(feature = "sqlite", feature = "mysql"))),
))]
pub use plugin::*;

@ -2,7 +2,7 @@
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use futures::future::BoxFuture;
use futures_core::future::BoxFuture;
use serde::{ser::Serializer, Deserialize, Serialize};
use serde_json::Value as JsonValue;
use sqlx::{
@ -10,7 +10,7 @@ use sqlx::{
migrate::{
MigrateDatabase, Migration as SqlxMigration, MigrationSource, MigrationType, Migrator,
},
Column, Pool, Row, TypeInfo, ValueRef,
Column, Pool, Row,
};
use tauri::{
command,
@ -282,58 +282,7 @@ async fn select(
for (i, column) in row.columns().iter().enumerate() {
let v = row.try_get_raw(i)?;
let v = if v.is_null() {
JsonValue::Null
} else {
// TODO: postgresql's JSON type
match v.type_info().name() {
"VARCHAR" | "STRING" | "TEXT" | "TINYTEXT" | "LONGTEXT" | "NVARCHAR"
| "BIGVARCHAR" | "CHAR" | "BIGCHAR" | "NCHAR" | "DATETIME" | "DATE"
| "TIME" | "YEAR" | "TIMESTAMP" => {
if let Ok(s) = row.try_get(i) {
JsonValue::String(s)
} else {
JsonValue::Null
}
}
"BOOL" | "BOOLEAN" => {
if let Ok(b) = row.try_get(i) {
JsonValue::Bool(b)
} else {
let x: String = row.get(i);
JsonValue::Bool(x.to_lowercase() == "true")
}
}
"INT" | "NUMBER" | "INTEGER" | "BIGINT" | "INT2" | "INT4" | "INT8"
| "NUMERIC" | "TINYINT" | "SMALLINT" | "MEDIUMINT" | "TINYINT UNSINGED"
| "SMALLINT UNSINGED" | "INT UNSINGED" | "MEDIUMINT UNSINGED"
| "BIGINT UNSINGED" => {
if let Ok(n) = row.try_get::<i64, usize>(i) {
JsonValue::Number(n.into())
} else {
JsonValue::Null
}
}
"REAL" | "FLOAT" | "DOUBLE" | "FLOAT4" | "FLOAT8" => {
if let Ok(n) = row.try_get::<f64, usize>(i) {
JsonValue::from(n)
} else {
JsonValue::Null
}
}
"BLOB" | "TINYBLOB" | "MEDIUMBLOB" | "LONGBLOB" | "BINARY" | "VARBINARY"
| "BYTEA" => {
if let Ok(n) = row.try_get::<Vec<u8>, usize>(i) {
JsonValue::Array(
n.into_iter().map(|n| JsonValue::Number(n.into())).collect(),
)
} else {
JsonValue::Null
}
}
_ => return Err(Error::UnsupportedDatatype(v.type_info().name().to_string())),
}
};
let v = crate::decode::to_json(v)?;
value.insert(column.name().to_string(), v);
}

@ -59,6 +59,46 @@ await store.set("some-key", { value: 5 });
const val = await store.get("some-key");
assert(val, { value: 5 });
await store.save(); // this manually saves the store, otherwise the store is only saved when your app is closed
```
### Persisting values
Values added to the store are not persisted between application loads unless:
1. The application is closed gracefully (plugin automatically saves)
2. The store is manually saved (using `store.save()`)
## Usage from Rust
You can also access Stores from Rust, you can create new stores:
```rust
use tauri_plugin_store::store::StoreBuilder;
use serde_json::json;
fn main() {
tauri::Builder::default()
.plugin(tauri_plugin_store::Builder::default().build())
.setup(|app| {
let mut store = StoreBuilder::new(app.handle(), "path/to/store.bin".parse()?).build();
store.insert("a".to_string(), json!("b")) // note that values must be serde_json::Value to be compatible with JS
})
.run(tauri::generate_context!())
.expect("error while running tauri application");
}
```
As you may have noticed, the Store created above isn't accessible to the frontend. To interoperate with stores created by JS use the exported `with_store` method:
```rust
use tauri::Wry;
use tauri_plugin_store::with_store;
let stores = app.state::<StoreCollection<Wry>>();
let path = PathBuf::from("path/to/the/storefile");
with_store(app_handle, stores, path, |store| store.insert("a".to_string(), json!("b")))
```
## Contributing

@ -10,9 +10,9 @@ use std::path::PathBuf;
#[non_exhaustive]
pub enum Error {
#[error("Failed to serialize store. {0}")]
Serialize(Box<dyn std::error::Error>),
Serialize(Box<dyn std::error::Error + Send + Sync>),
#[error("Failed to deserialize store. {0}")]
Deserialize(Box<dyn std::error::Error>),
Deserialize(Box<dyn std::error::Error + Send + Sync>),
/// JSON error.
#[error(transparent)]
Json(#[from] serde_json::Error),
@ -22,6 +22,9 @@ pub enum Error {
/// Store not found
#[error("Store \"{0}\" not found")]
NotFound(PathBuf),
/// Some Tauri API failed
#[error(transparent)]
Tauri(#[from] tauri::Error),
}
impl Serialize for Error {

@ -5,251 +5,201 @@
pub use error::Error;
use log::warn;
use serde::Serialize;
use serde_json::Value as JsonValue;
use std::{collections::HashMap, path::PathBuf, sync::Mutex};
pub use serde_json::Value as JsonValue;
use std::{
collections::HashMap,
path::{Path, PathBuf},
sync::Mutex,
};
pub use store::{Store, StoreBuilder};
use tauri::{
plugin::{self, TauriPlugin},
AppHandle, Manager, RunEvent, Runtime, State, Window,
AppHandle, Manager, RunEvent, Runtime, State,
};
mod error;
mod store;
#[derive(Serialize, Clone)]
struct ChangePayload {
path: PathBuf,
key: String,
value: JsonValue,
struct ChangePayload<'a> {
path: &'a Path,
key: &'a str,
value: &'a JsonValue,
}
#[derive(Default)]
struct StoreCollection {
stores: Mutex<HashMap<PathBuf, Store>>,
pub struct StoreCollection<R: Runtime> {
stores: Mutex<HashMap<PathBuf, Store<R>>>,
frozen: bool,
}
fn with_store<R: Runtime, T, F: FnOnce(&mut Store) -> Result<T, Error>>(
app: &AppHandle<R>,
collection: State<'_, StoreCollection>,
path: PathBuf,
pub fn with_store<R: Runtime, T, F: FnOnce(&mut Store<R>) -> Result<T, Error>>(
app: AppHandle<R>,
collection: State<'_, StoreCollection<R>>,
path: impl AsRef<Path>,
f: F,
) -> Result<T, Error> {
let mut stores = collection.stores.lock().expect("mutex poisoned");
if !stores.contains_key(&path) {
let path = path.as_ref();
if !stores.contains_key(path) {
if collection.frozen {
return Err(Error::NotFound(path));
return Err(Error::NotFound(path.to_path_buf()));
}
let mut store = StoreBuilder::new(path.clone()).build();
let mut store = StoreBuilder::new(app, path.to_path_buf()).build();
// ignore loading errors, just use the default
if let Err(err) = store.load(app) {
if let Err(err) = store.load() {
warn!(
"Failed to load store {:?} from disk: {}. Falling back to default values.",
path, err
);
}
stores.insert(path.clone(), store);
stores.insert(path.to_path_buf(), store);
}
f(stores
.get_mut(&path)
.get_mut(path)
.expect("failed to retrieve store. This is a bug!"))
}
#[tauri::command]
async fn set<R: Runtime>(
app: AppHandle<R>,
window: Window<R>,
stores: State<'_, StoreCollection>,
stores: State<'_, StoreCollection<R>>,
path: PathBuf,
key: String,
value: JsonValue,
) -> Result<(), Error> {
with_store(&app, stores, path.clone(), |store| {
store.cache.insert(key.clone(), value.clone());
let _ = window.emit("store://change", ChangePayload { path, key, value });
Ok(())
})
with_store(app, stores, path, |store| store.insert(key, value))
}
#[tauri::command]
async fn get<R: Runtime>(
app: AppHandle<R>,
stores: State<'_, StoreCollection>,
stores: State<'_, StoreCollection<R>>,
path: PathBuf,
key: String,
) -> Result<Option<JsonValue>, Error> {
with_store(&app, stores, path, |store| {
Ok(store.cache.get(&key).cloned())
})
with_store(app, stores, path, |store| Ok(store.get(key).cloned()))
}
#[tauri::command]
async fn has<R: Runtime>(
app: AppHandle<R>,
stores: State<'_, StoreCollection>,
stores: State<'_, StoreCollection<R>>,
path: PathBuf,
key: String,
) -> Result<bool, Error> {
with_store(&app, stores, path, |store| {
Ok(store.cache.contains_key(&key))
})
with_store(app, stores, path, |store| Ok(store.has(key)))
}
#[tauri::command]
async fn delete<R: Runtime>(
app: AppHandle<R>,
window: Window<R>,
stores: State<'_, StoreCollection>,
stores: State<'_, StoreCollection<R>>,
path: PathBuf,
key: String,
) -> Result<bool, Error> {
with_store(&app, stores, path.clone(), |store| {
let flag = store.cache.remove(&key).is_some();
if flag {
let _ = window.emit(
"store://change",
ChangePayload {
path,
key,
value: JsonValue::Null,
},
);
}
Ok(flag)
})
with_store(app, stores, path, |store| store.delete(key))
}
#[tauri::command]
async fn clear<R: Runtime>(
app: AppHandle<R>,
window: Window<R>,
stores: State<'_, StoreCollection>,
stores: State<'_, StoreCollection<R>>,
path: PathBuf,
) -> Result<(), Error> {
with_store(&app, stores, path.clone(), |store| {
let keys = store.cache.keys().cloned().collect::<Vec<String>>();
store.cache.clear();
for key in keys {
let _ = window.emit(
"store://change",
ChangePayload {
path: path.clone(),
key,
value: JsonValue::Null,
},
);
}
Ok(())
})
with_store(app, stores, path, |store| store.clear())
}
#[tauri::command]
async fn reset<R: Runtime>(
app: AppHandle<R>,
window: Window<R>,
collection: State<'_, StoreCollection>,
collection: State<'_, StoreCollection<R>>,
path: PathBuf,
) -> Result<(), Error> {
let has_defaults = collection
.stores
.lock()
.expect("mutex poisoned")
.get(&path)
.map(|store| store.defaults.is_some());
if Some(true) == has_defaults {
with_store(&app, collection, path.clone(), |store| {
if let Some(defaults) = &store.defaults {
for (key, value) in &store.cache {
if defaults.get(key) != Some(value) {
let _ = window.emit(
"store://change",
ChangePayload {
path: path.clone(),
key: key.clone(),
value: defaults.get(key).cloned().unwrap_or(JsonValue::Null),
},
);
}
}
store.cache = defaults.clone();
}
Ok(())
})
} else {
clear(app, window, collection, path).await
}
with_store(app, collection, path, |store| store.reset())
}
#[tauri::command]
async fn keys<R: Runtime>(
app: AppHandle<R>,
stores: State<'_, StoreCollection>,
stores: State<'_, StoreCollection<R>>,
path: PathBuf,
) -> Result<Vec<String>, Error> {
with_store(&app, stores, path, |store| {
Ok(store.cache.keys().cloned().collect())
with_store(app, stores, path, |store| {
Ok(store.keys().cloned().collect())
})
}
#[tauri::command]
async fn values<R: Runtime>(
app: AppHandle<R>,
stores: State<'_, StoreCollection>,
stores: State<'_, StoreCollection<R>>,
path: PathBuf,
) -> Result<Vec<JsonValue>, Error> {
with_store(&app, stores, path, |store| {
Ok(store.cache.values().cloned().collect())
with_store(app, stores, path, |store| {
Ok(store.values().cloned().collect())
})
}
#[tauri::command]
async fn entries<R: Runtime>(
app: AppHandle<R>,
stores: State<'_, StoreCollection>,
stores: State<'_, StoreCollection<R>>,
path: PathBuf,
) -> Result<Vec<(String, JsonValue)>, Error> {
with_store(&app, stores, path, |store| {
Ok(store.cache.clone().into_iter().collect())
with_store(app, stores, path, |store| {
Ok(store
.entries()
.map(|(k, v)| (k.to_owned(), v.to_owned()))
.collect())
})
}
#[tauri::command]
async fn length<R: Runtime>(
app: AppHandle<R>,
stores: State<'_, StoreCollection>,
stores: State<'_, StoreCollection<R>>,
path: PathBuf,
) -> Result<usize, Error> {
with_store(&app, stores, path, |store| Ok(store.cache.len()))
with_store(app, stores, path, |store| Ok(store.len()))
}
#[tauri::command]
async fn load<R: Runtime>(
app: AppHandle<R>,
stores: State<'_, StoreCollection>,
stores: State<'_, StoreCollection<R>>,
path: PathBuf,
) -> Result<(), Error> {
with_store(&app, stores, path, |store| store.load(&app))
with_store(app, stores, path, |store| store.load())
}
#[tauri::command]
async fn save<R: Runtime>(
app: AppHandle<R>,
stores: State<'_, StoreCollection>,
stores: State<'_, StoreCollection<R>>,
path: PathBuf,
) -> Result<(), Error> {
with_store(&app, stores, path, |store| store.save(&app))
with_store(app, stores, path, |store| store.save())
}
#[derive(Default)]
pub struct Builder {
stores: HashMap<PathBuf, Store>,
// #[derive(Default)]
pub struct Builder<R: Runtime> {
stores: HashMap<PathBuf, Store<R>>,
frozen: bool,
}
impl Builder {
impl<R: Runtime> Default for Builder<R> {
fn default() -> Self {
Self {
stores: Default::default(),
frozen: false,
}
}
}
impl<R: Runtime> Builder<R> {
/// Registers a store with the plugin.
///
/// # Examples
@ -265,7 +215,7 @@ impl Builder {
/// # Ok(())
/// # }
/// ```
pub fn store(mut self, store: Store) -> Self {
pub fn store(mut self, store: Store<R>) -> Self {
self.stores.insert(store.path.clone(), store);
self
}
@ -285,7 +235,7 @@ impl Builder {
/// # Ok(())
/// # }
/// ```
pub fn stores<T: IntoIterator<Item = Store>>(mut self, stores: T) -> Self {
pub fn stores<T: IntoIterator<Item = Store<R>>>(mut self, stores: T) -> Self {
self.stores = stores
.into_iter()
.map(|store| (store.path.clone(), store))
@ -331,7 +281,7 @@ impl Builder {
/// # Ok(())
/// # }
/// ```
pub fn build<R: Runtime>(mut self) -> TauriPlugin<R> {
pub fn build(mut self) -> TauriPlugin<R> {
plugin::Builder::new("store")
.invoke_handler(tauri::generate_handler![
set, get, has, delete, clear, reset, keys, values, length, entries, load, save
@ -339,7 +289,7 @@ impl Builder {
.setup(move |app_handle| {
for (path, store) in self.stores.iter_mut() {
// ignore loading errors, just use the default
if let Err(err) = store.load(app_handle) {
if let Err(err) = store.load() {
warn!(
"Failed to load store {:?} from disk: {}. Falling back to default values.",
path, err
@ -356,10 +306,10 @@ impl Builder {
})
.on_event(|app_handle, event| {
if let RunEvent::Exit = event {
let collection = app_handle.state::<StoreCollection>();
let collection = app_handle.state::<StoreCollection<R>>();
for store in collection.stores.lock().expect("mutex poisoned").values() {
if let Err(err) = store.save(app_handle) {
if let Err(err) = store.save() {
eprintln!("failed to save store {:?} with error {:?}", store.path, err);
}
}

@ -2,7 +2,7 @@
// SPDX-License-Identifier: Apache-2.0
// SPDX-License-Identifier: MIT
use crate::Error;
use crate::{ChangePayload, Error};
use serde_json::Value as JsonValue;
use std::{
collections::HashMap,
@ -10,25 +10,28 @@ use std::{
io::Write,
path::PathBuf,
};
use tauri::{AppHandle, Runtime};
use tauri::{AppHandle, Manager, Runtime};
type SerializeFn = fn(&HashMap<String, JsonValue>) -> Result<Vec<u8>, Box<dyn std::error::Error>>;
type DeserializeFn = fn(&[u8]) -> Result<HashMap<String, JsonValue>, Box<dyn std::error::Error>>;
type SerializeFn =
fn(&HashMap<String, JsonValue>) -> Result<Vec<u8>, Box<dyn std::error::Error + Send + Sync>>;
type DeserializeFn =
fn(&[u8]) -> Result<HashMap<String, JsonValue>, Box<dyn std::error::Error + Send + Sync>>;
fn default_serialize(
cache: &HashMap<String, JsonValue>,
) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
) -> Result<Vec<u8>, Box<dyn std::error::Error + Send + Sync>> {
Ok(serde_json::to_vec(&cache)?)
}
fn default_deserialize(
bytes: &[u8],
) -> Result<HashMap<String, JsonValue>, Box<dyn std::error::Error>> {
) -> Result<HashMap<String, JsonValue>, Box<dyn std::error::Error + Send + Sync>> {
serde_json::from_slice(bytes).map_err(Into::into)
}
/// Builds a [`Store`]
pub struct StoreBuilder {
pub struct StoreBuilder<R: Runtime> {
app: AppHandle<R>,
path: PathBuf,
defaults: Option<HashMap<String, JsonValue>>,
cache: HashMap<String, JsonValue>,
@ -36,7 +39,7 @@ pub struct StoreBuilder {
deserialize: DeserializeFn,
}
impl StoreBuilder {
impl<R: Runtime> StoreBuilder<R> {
/// Creates a new [`StoreBuilder`].
///
/// # Examples
@ -49,8 +52,9 @@ impl StoreBuilder {
/// # Ok(())
/// # }
/// ```
pub fn new(path: PathBuf) -> Self {
pub fn new(app: AppHandle<R>, path: PathBuf) -> Self {
Self {
app,
path,
defaults: None,
cache: Default::default(),
@ -147,8 +151,9 @@ impl StoreBuilder {
///
/// # Ok(())
/// # }
pub fn build(self) -> Store {
pub fn build(self) -> Store<R> {
Store {
app: self.app,
path: self.path,
defaults: self.defaults,
cache: self.cache,
@ -159,18 +164,20 @@ impl StoreBuilder {
}
#[derive(Clone)]
pub struct Store {
pub struct Store<R: Runtime> {
app: AppHandle<R>,
pub(crate) path: PathBuf,
pub(crate) defaults: Option<HashMap<String, JsonValue>>,
pub(crate) cache: HashMap<String, JsonValue>,
defaults: Option<HashMap<String, JsonValue>>,
cache: HashMap<String, JsonValue>,
serialize: SerializeFn,
deserialize: DeserializeFn,
}
impl Store {
impl<R: Runtime> Store<R> {
/// Update the store from the on-disk state
pub fn load<R: Runtime>(&mut self, app: &AppHandle<R>) -> Result<(), Error> {
let app_dir = app
pub fn load(&mut self) -> Result<(), Error> {
let app_dir = self
.app
.path_resolver()
.app_data_dir()
.expect("failed to resolve app dir");
@ -184,8 +191,9 @@ impl Store {
}
/// Saves the store to disk
pub fn save<R: Runtime>(&self, app: &AppHandle<R>) -> Result<(), Error> {
let app_dir = app
pub fn save(&self) -> Result<(), Error> {
let app_dir = self
.app
.path_resolver()
.app_data_dir()
.expect("failed to resolve app dir");
@ -199,9 +207,107 @@ impl Store {
Ok(())
}
/// Inserts `key`/`value` into the in-memory cache and broadcasts a
/// `store://change` event to every window.
///
/// The change only lives in memory until the next `save` call persists it.
///
/// # Errors
/// Propagates any failure from the event emission; the cache is already
/// updated by the time the event is sent.
pub fn insert(&mut self, key: String, value: JsonValue) -> Result<(), Error> {
    // Update the cache first; the event below merely mirrors the new state.
    self.cache.insert(key.clone(), value.clone());

    let payload = ChangePayload {
        path: &self.path,
        key: &key,
        value: &value,
    };
    self.app.emit_all("store://change", payload)?;

    Ok(())
}
/// Returns a reference to the cached value for `key`, or `None` if the key
/// is not present. Only reads the in-memory cache; never touches disk.
pub fn get(&self, key: impl AsRef<str>) -> Option<&JsonValue> {
    self.cache.get(key.as_ref())
}
/// Returns `true` if `key` exists in the in-memory cache.
pub fn has(&self, key: impl AsRef<str>) -> bool {
    self.cache.contains_key(key.as_ref())
}
/// Removes `key` from the cache, returning whether it was present.
///
/// When a value is actually removed, a `store://change` event with a `null`
/// value is broadcast so listeners can observe the deletion.
///
/// # Errors
/// Propagates a failed event emission; the key is removed from the cache
/// either way.
pub fn delete(&mut self, key: impl AsRef<str>) -> Result<bool, Error> {
    match self.cache.remove(key.as_ref()) {
        Some(_) => {
            // Notify listeners; a null value signals removal.
            self.app.emit_all(
                "store://change",
                ChangePayload {
                    path: &self.path,
                    key: key.as_ref(),
                    value: &JsonValue::Null,
                },
            )?;
            Ok(true)
        }
        None => Ok(false),
    }
}
/// Removes every key from the store, broadcasting a `store://change` event
/// (with a `null` value) for each removed key.
///
/// # Errors
/// Returns the first event-emission error. The cache is cleared regardless;
/// keys processed before the failure have already had their events sent,
/// matching the original clear-then-emit ordering.
pub fn clear(&mut self) -> Result<(), Error> {
    // Take ownership of the old map (leaving an empty one in place) so the
    // removed keys can be emitted without collecting them into a temporary
    // Vec and cloning every String, as the previous implementation did.
    for key in std::mem::take(&mut self.cache).into_keys() {
        self.app.emit_all(
            "store://change",
            ChangePayload {
                path: &self.path,
                key: &key,
                value: &JsonValue::Null,
            },
        )?;
    }
    Ok(())
}
/// Resets the store to its configured default values, emitting a
/// `store://change` event for every key whose value actually changes.
///
/// If no defaults were configured, this is equivalent to [`clear`](Self::clear).
///
/// # Errors
/// Only the defaults-less path (`clear`) can fail; change events emitted here
/// are deliberately best-effort (failures are ignored), as in the original.
pub fn reset(&mut self) -> Result<(), Error> {
    // The original checked `self.defaults.is_some()` and then `if let` on the
    // same field — the inner check could never fail. A single `if let`/`else`
    // expresses the same logic without the dead nesting.
    if let Some(defaults) = &self.defaults {
        for (key, value) in &self.cache {
            if defaults.get(key) != Some(value) {
                // Best-effort: a failed emission must not abort the reset.
                let _ = self.app.emit_all(
                    "store://change",
                    ChangePayload {
                        path: &self.path,
                        key,
                        value: defaults.get(key).unwrap_or(&JsonValue::Null),
                    },
                );
            }
        }
        // NOTE(review): keys present only in `defaults` (absent from the
        // cache) get no change event — preserved from the original; confirm
        // whether listeners should be told about newly-appearing defaults.
        self.cache = defaults.clone();
        Ok(())
    } else {
        self.clear()
    }
}
/// Returns an iterator over the keys currently in the cache.
pub fn keys(&self) -> impl Iterator<Item = &String> {
    self.cache.keys()
}
/// Returns an iterator over the values currently in the cache.
pub fn values(&self) -> impl Iterator<Item = &JsonValue> {
    self.cache.values()
}
/// Returns an iterator over the `(key, value)` pairs currently in the cache.
pub fn entries(&self) -> impl Iterator<Item = (&String, &JsonValue)> {
    self.cache.iter()
}
/// Returns the number of entries currently in the cache.
pub fn len(&self) -> usize {
    self.cache.len()
}
/// Returns `true` if the cache holds no entries.
pub fn is_empty(&self) -> bool {
    self.cache.is_empty()
}
}
impl std::fmt::Debug for Store {
impl<R: Runtime> std::fmt::Debug for Store<R> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("Store")
.field("path", &self.path)

@ -16,7 +16,7 @@ tauri.workspace = true
log.workspace = true
thiserror.workspace = true
iota_stronghold = "1"
iota-crypto = "0.15"
iota-crypto = "0.16"
hex = "0.4"
zeroize = { version = "1", features = ["zeroize_derive"] }

@ -67,7 +67,7 @@ async function download(
await listenToEventIfNeeded("download://progress");
await invoke("plugin:upload|upload", {
await invoke("plugin:upload|download", {
id,
url,
filePath,

@ -225,7 +225,7 @@ impl<R: Runtime> WindowExtInternal for Window<R> {
}
if flags.contains(StateFlags::POSITION) {
let position = self.inner_position()?;
let position = self.outer_position()?;
if let Ok(Some(monitor)) = self.current_monitor() {
// save only window positions that are inside the current monitor
if monitor.contains(position) && !is_maximized {

Loading…
Cancel
Save