refactor: move deleted tauri APIs, prepare for next release (#355)
parent
937e6a5be6
commit
702b7b36bd
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,57 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use serde::Deserialize;
|
||||
|
||||
#[derive(Debug, Deserialize)]
pub struct Config {
    /// The filesystem scope: glob patterns restricting which paths the webview may access.
    pub scope: FsScope,
}
|
||||
|
||||
/// Protocol scope definition.
/// It is a list of glob patterns that restrict the API access from the webview.
///
/// Each pattern can start with a variable that resolves to a system base directory.
/// The variables are: `$AUDIO`, `$CACHE`, `$CONFIG`, `$DATA`, `$LOCALDATA`, `$DESKTOP`,
/// `$DOCUMENT`, `$DOWNLOAD`, `$EXE`, `$FONT`, `$HOME`, `$PICTURE`, `$PUBLIC`, `$RUNTIME`,
/// `$TEMPLATE`, `$VIDEO`, `$RESOURCE`, `$APP`, `$LOG`, `$TEMP`, `$APPCONFIG`, `$APPDATA`,
/// `$APPLOCALDATA`, `$APPCACHE`, `$APPLOG`.
///
/// Deserialized untagged: either a plain array of paths (`AllowedPaths`)
/// or an object with `allow`/`deny` lists (`Scope`).
#[derive(Debug, PartialEq, Eq, Clone, Deserialize)]
#[serde(untagged)]
pub enum FsScope {
    /// A list of paths that are allowed by this scope.
    AllowedPaths(Vec<PathBuf>),
    /// A complete scope configuration.
    Scope {
        /// A list of paths that are allowed by this scope.
        #[serde(default)]
        allow: Vec<PathBuf>,
        /// A list of paths that are not allowed by this scope.
        /// This gets precedence over the [`Self::Scope::allow`] list.
        #[serde(default)]
        deny: Vec<PathBuf>,
    },
}
|
||||
|
||||
impl Default for FsScope {
|
||||
fn default() -> Self {
|
||||
Self::AllowedPaths(Vec::new())
|
||||
}
|
||||
}
|
||||
|
||||
impl FsScope {
|
||||
/// The list of allowed paths.
|
||||
pub fn allowed_paths(&self) -> &Vec<PathBuf> {
|
||||
match self {
|
||||
Self::AllowedPaths(p) => p,
|
||||
Self::Scope { allow, .. } => allow,
|
||||
}
|
||||
}
|
||||
|
||||
/// The list of forbidden paths.
|
||||
pub fn forbidden_paths(&self) -> Option<&Vec<PathBuf>> {
|
||||
match self {
|
||||
Self::AllowedPaths(_) => None,
|
||||
Self::Scope { deny, .. } => Some(deny),
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,368 @@
|
||||
// Copyright 2019-2023 Tauri Programme within The Commons Conservancy
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use std::{
|
||||
collections::{HashMap, HashSet},
|
||||
fmt,
|
||||
path::{Path, PathBuf, MAIN_SEPARATOR},
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
|
||||
use crate::config::FsScope;
|
||||
pub use glob::Pattern;
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::{Manager, Runtime};
|
||||
|
||||
/// Scope change event.
#[derive(Debug, Clone)]
pub enum Event {
    /// A path has been allowed.
    PathAllowed(PathBuf),
    /// A path has been forbidden.
    PathForbidden(PathBuf),
}

// Callback invoked on every scope change; `Send` because scopes are shared
// across threads behind `Arc<Mutex<..>>`.
type EventListener = Box<dyn Fn(&Event) + Send>;
|
||||
|
||||
/// Scope for filesystem access.
#[derive(Clone)]
pub struct Scope {
    /// Glob patterns for paths the webview is allowed to access.
    allowed_patterns: Arc<Mutex<HashSet<Pattern>>>,
    /// Glob patterns for paths that are denied; these take precedence over allows.
    forbidden_patterns: Arc<Mutex<HashSet<Pattern>>>,
    /// Registered scope-change listeners, keyed by the id returned from `listen`.
    event_listeners: Arc<Mutex<HashMap<Uuid, EventListener>>>,
}
|
||||
|
||||
impl fmt::Debug for Scope {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("Scope")
|
||||
.field(
|
||||
"allowed_patterns",
|
||||
&self
|
||||
.allowed_patterns
|
||||
.lock()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.map(|p| p.as_str())
|
||||
.collect::<Vec<&str>>(),
|
||||
)
|
||||
.field(
|
||||
"forbidden_patterns",
|
||||
&self
|
||||
.forbidden_patterns
|
||||
.lock()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.map(|p| p.as_str())
|
||||
.collect::<Vec<&str>>(),
|
||||
)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
/// Inserts `pattern` into `list`, using `f` to build a glob `Pattern` from
/// the path's string form.
///
/// On Windows a second pattern is inserted for the canonical form of the
/// path (which carries the `\\?\` extended-length prefix), so matching works
/// regardless of which spelling the checked path uses; when canonicalization
/// fails the prefix is prepended manually.
fn push_pattern<P: AsRef<Path>, F: Fn(&str) -> Result<Pattern, glob::PatternError>>(
    list: &mut HashSet<Pattern>,
    pattern: P,
    f: F,
) -> crate::Result<()> {
    // Re-collecting the components normalizes separators for this platform.
    let path: PathBuf = pattern.as_ref().components().collect();
    list.insert(f(&path.to_string_lossy())?);
    #[cfg(windows)]
    {
        if let Ok(p) = std::fs::canonicalize(&path) {
            list.insert(f(&p.to_string_lossy())?);
        } else {
            list.insert(f(&format!("\\\\?\\{}", path.display()))?);
        }
    }
    Ok(())
}
|
||||
|
||||
impl Scope {
    /// Creates a new scope from a `FsAllowlistScope` configuration.
    ///
    /// Each configured path is resolved through the manager's path parser
    /// (expanding `$HOME`-style base-directory variables); entries that fail
    /// to resolve are silently skipped rather than failing construction.
    pub(crate) fn new<R: Runtime, M: Manager<R>>(
        manager: &M,
        scope: &FsScope,
    ) -> crate::Result<Self> {
        let mut allowed_patterns = HashSet::new();
        for path in scope.allowed_paths() {
            if let Ok(path) = manager.path().parse(path) {
                push_pattern(&mut allowed_patterns, path, Pattern::new)?;
            }
        }

        let mut forbidden_patterns = HashSet::new();
        if let Some(forbidden_paths) = scope.forbidden_paths() {
            for path in forbidden_paths {
                if let Ok(path) = manager.path().parse(path) {
                    push_pattern(&mut forbidden_patterns, path, Pattern::new)?;
                }
            }
        }

        Ok(Self {
            allowed_patterns: Arc::new(Mutex::new(allowed_patterns)),
            forbidden_patterns: Arc::new(Mutex::new(forbidden_patterns)),
            event_listeners: Default::default(),
        })
    }

    /// The list of allowed patterns.
    ///
    /// Returns a snapshot clone; later scope mutations are not reflected.
    pub fn allowed_patterns(&self) -> HashSet<Pattern> {
        self.allowed_patterns.lock().unwrap().clone()
    }

    /// The list of forbidden patterns.
    ///
    /// Returns a snapshot clone; later scope mutations are not reflected.
    pub fn forbidden_patterns(&self) -> HashSet<Pattern> {
        self.forbidden_patterns.lock().unwrap().clone()
    }

    /// Listen to an event on this scope.
    ///
    /// Returns the identifier of the registered listener.
    pub fn listen<F: Fn(&Event) + Send + 'static>(&self, f: F) -> Uuid {
        let id = Uuid::new_v4();
        self.event_listeners.lock().unwrap().insert(id, Box::new(f));
        id
    }

    /// Invokes every registered listener with the given scope-change event.
    fn trigger(&self, event: Event) {
        let listeners = self.event_listeners.lock().unwrap();
        let handlers = listeners.values();
        for listener in handlers {
            listener(&event);
        }
    }

    /// Extend the allowed patterns with the given directory.
    ///
    /// After this function has been called, the frontend will be able to use the Tauri API to read
    /// the directory and all of its files. If `recursive` is `true`, subdirectories will be accessible too.
    pub fn allow_directory<P: AsRef<Path>>(&self, path: P, recursive: bool) -> crate::Result<()> {
        let path = path.as_ref();
        {
            // Scoped block so the lock is released before listeners run in `trigger`.
            let mut list = self.allowed_patterns.lock().unwrap();

            // allow the directory to be read
            push_pattern(&mut list, path, escaped_pattern)?;
            // allow its files and subdirectories to be read
            push_pattern(&mut list, path, |p| {
                escaped_pattern_with(p, if recursive { "**" } else { "*" })
            })?;
        }
        self.trigger(Event::PathAllowed(path.to_path_buf()));
        Ok(())
    }

    /// Extend the allowed patterns with the given file path.
    ///
    /// After this function has been called, the frontend will be able to use the Tauri API to read the contents of this file.
    pub fn allow_file<P: AsRef<Path>>(&self, path: P) -> crate::Result<()> {
        let path = path.as_ref();
        push_pattern(
            &mut self.allowed_patterns.lock().unwrap(),
            path,
            escaped_pattern,
        )?;
        self.trigger(Event::PathAllowed(path.to_path_buf()));
        Ok(())
    }

    /// Set the given directory path to be forbidden by this scope.
    ///
    /// **Note:** this takes precedence over allowed paths, so its access gets denied **always**.
    pub fn forbid_directory<P: AsRef<Path>>(&self, path: P, recursive: bool) -> crate::Result<()> {
        let path = path.as_ref();
        {
            // Scoped block so the lock is released before listeners run in `trigger`.
            let mut list = self.forbidden_patterns.lock().unwrap();

            // forbid the directory itself
            push_pattern(&mut list, path, escaped_pattern)?;
            // forbid its files and subdirectories
            push_pattern(&mut list, path, |p| {
                escaped_pattern_with(p, if recursive { "**" } else { "*" })
            })?;
        }
        self.trigger(Event::PathForbidden(path.to_path_buf()));
        Ok(())
    }

    /// Set the given file path to be forbidden by this scope.
    ///
    /// **Note:** this takes precedence over allowed paths, so its access gets denied **always**.
    pub fn forbid_file<P: AsRef<Path>>(&self, path: P) -> crate::Result<()> {
        let path = path.as_ref();
        push_pattern(
            &mut self.forbidden_patterns.lock().unwrap(),
            path,
            escaped_pattern,
        )?;
        self.trigger(Event::PathForbidden(path.to_path_buf()));
        Ok(())
    }

    /// Determines if the given path is allowed on this scope.
    pub fn is_allowed<P: AsRef<Path>>(&self, path: P) -> bool {
        let path = path.as_ref();
        // Existing paths are canonicalized so symlinks cannot escape the
        // scope; nonexistent paths are checked as given.
        let path = if !path.exists() {
            crate::Result::Ok(path.to_path_buf())
        } else {
            std::fs::canonicalize(path).map_err(Into::into)
        };

        if let Ok(path) = path {
            let path: PathBuf = path.components().collect();
            let options = glob::MatchOptions {
                // this is needed so `/dir/*` doesn't match files within subdirectories such as `/dir/subdir/file.txt`
                // see: https://github.com/tauri-apps/tauri/security/advisories/GHSA-6mv3-wm7j-h4w5
                require_literal_separator: true,
                // dotfiles are not supposed to be exposed by default
                #[cfg(unix)]
                require_literal_leading_dot: true,
                ..Default::default()
            };

            // A deny match always wins over any allow match.
            let forbidden = self
                .forbidden_patterns
                .lock()
                .unwrap()
                .iter()
                .any(|p| p.matches_path_with(&path, options));

            if forbidden {
                false
            } else {
                let allowed = self
                    .allowed_patterns
                    .lock()
                    .unwrap()
                    .iter()
                    .any(|p| p.matches_path_with(&path, options));
                allowed
            }
        } else {
            // Canonicalization failed: deny by default.
            false
        }
    }
}
|
||||
|
||||
fn escaped_pattern(p: &str) -> Result<Pattern, glob::PatternError> {
|
||||
Pattern::new(&glob::Pattern::escape(p))
|
||||
}
|
||||
|
||||
fn escaped_pattern_with(p: &str, append: &str) -> Result<Pattern, glob::PatternError> {
|
||||
Pattern::new(&format!(
|
||||
"{}{}{append}",
|
||||
glob::Pattern::escape(p),
|
||||
MAIN_SEPARATOR
|
||||
))
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::Scope;

    /// Builds an empty scope: no allows, no denies, no listeners.
    fn new_scope() -> Scope {
        Scope {
            allowed_patterns: Default::default(),
            forbidden_patterns: Default::default(),
            event_listeners: Default::default(),
        }
    }

    #[test]
    fn path_is_escaped() {
        // Glob metacharacters in an allowed directory must be treated literally.
        let scope = new_scope();
        #[cfg(unix)]
        {
            scope.allow_directory("/home/tauri/**", false).unwrap();
            assert!(scope.is_allowed("/home/tauri/**"));
            assert!(scope.is_allowed("/home/tauri/**/file"));
            assert!(!scope.is_allowed("/home/tauri/anyfile"));
        }
        #[cfg(windows)]
        {
            scope.allow_directory("C:\\home\\tauri\\**", false).unwrap();
            assert!(scope.is_allowed("C:\\home\\tauri\\**"));
            assert!(scope.is_allowed("C:\\home\\tauri\\**\\file"));
            assert!(!scope.is_allowed("C:\\home\\tauri\\anyfile"));
        }

        // Same escaping guarantee for a single allowed file.
        let scope = new_scope();
        #[cfg(unix)]
        {
            scope.allow_file("/home/tauri/**").unwrap();
            assert!(scope.is_allowed("/home/tauri/**"));
            assert!(!scope.is_allowed("/home/tauri/**/file"));
            assert!(!scope.is_allowed("/home/tauri/anyfile"));
        }
        #[cfg(windows)]
        {
            scope.allow_file("C:\\home\\tauri\\**").unwrap();
            assert!(scope.is_allowed("C:\\home\\tauri\\**"));
            assert!(!scope.is_allowed("C:\\home\\tauri\\**\\file"));
            assert!(!scope.is_allowed("C:\\home\\tauri\\anyfile"));
        }

        // Forbidden directory patterns are matched literally too.
        let scope = new_scope();
        #[cfg(unix)]
        {
            scope.allow_directory("/home/tauri", true).unwrap();
            scope.forbid_directory("/home/tauri/**", false).unwrap();
            assert!(!scope.is_allowed("/home/tauri/**"));
            assert!(!scope.is_allowed("/home/tauri/**/file"));
            assert!(scope.is_allowed("/home/tauri/**/inner/file"));
            assert!(scope.is_allowed("/home/tauri/inner/folder/anyfile"));
            assert!(scope.is_allowed("/home/tauri/anyfile"));
        }
        #[cfg(windows)]
        {
            scope.allow_directory("C:\\home\\tauri", true).unwrap();
            scope
                .forbid_directory("C:\\home\\tauri\\**", false)
                .unwrap();
            assert!(!scope.is_allowed("C:\\home\\tauri\\**"));
            assert!(!scope.is_allowed("C:\\home\\tauri\\**\\file"));
            assert!(scope.is_allowed("C:\\home\\tauri\\**\\inner\\file"));
            assert!(scope.is_allowed("C:\\home\\tauri\\inner\\folder\\anyfile"));
            assert!(scope.is_allowed("C:\\home\\tauri\\anyfile"));
        }

        // Forbidden file patterns only deny the literal path.
        let scope = new_scope();
        #[cfg(unix)]
        {
            scope.allow_directory("/home/tauri", true).unwrap();
            scope.forbid_file("/home/tauri/**").unwrap();
            assert!(!scope.is_allowed("/home/tauri/**"));
            assert!(scope.is_allowed("/home/tauri/**/file"));
            assert!(scope.is_allowed("/home/tauri/**/inner/file"));
            assert!(scope.is_allowed("/home/tauri/anyfile"));
        }
        #[cfg(windows)]
        {
            scope.allow_directory("C:\\home\\tauri", true).unwrap();
            scope.forbid_file("C:\\home\\tauri\\**").unwrap();
            assert!(!scope.is_allowed("C:\\home\\tauri\\**"));
            assert!(scope.is_allowed("C:\\home\\tauri\\**\\file"));
            assert!(scope.is_allowed("C:\\home\\tauri\\**\\inner\\file"));
            assert!(scope.is_allowed("C:\\home\\tauri\\anyfile"));
        }

        // Non-recursive allow covers direct children only.
        let scope = new_scope();
        #[cfg(unix)]
        {
            scope.allow_directory("/home/tauri", false).unwrap();
            assert!(scope.is_allowed("/home/tauri/**"));
            assert!(!scope.is_allowed("/home/tauri/**/file"));
            assert!(!scope.is_allowed("/home/tauri/**/inner/file"));
            assert!(scope.is_allowed("/home/tauri/anyfile"));
        }
        #[cfg(windows)]
        {
            scope.allow_directory("C:\\home\\tauri", false).unwrap();
            assert!(scope.is_allowed("C:\\home\\tauri\\**"));
            assert!(!scope.is_allowed("C:\\home\\tauri\\**\\file"));
            assert!(!scope.is_allowed("C:\\home\\tauri\\**\\inner\\file"));
            assert!(scope.is_allowed("C:\\home\\tauri\\anyfile"));
        }
    }
}
|
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,19 @@
|
||||
use reqwest::Url;
|
||||
use serde::Deserialize;
|
||||
|
||||
#[derive(Deserialize)]
pub struct Config {
    /// The allowed URL scope for the HTTP APIs.
    pub scope: HttpAllowlistScope,
}
|
||||
|
||||
/// HTTP API scope definition.
/// It is a list of URLs that can be accessed by the webview when using the HTTP APIs.
/// The scoped URL is matched against the request URL using a glob pattern.
///
/// Examples:
/// - "https://**": allows all HTTPS urls
/// - "https://*.github.com/tauri-apps/tauri": allows any subdomain of "github.com" with the "tauri-apps/tauri" path
/// - "https://myapi.service.com/users/*": allows access to any URLs that begins with "https://myapi.service.com/users/"
#[allow(rustdoc::bare_urls)]
#[derive(Debug, Default, PartialEq, Eq, Clone, Deserialize)]
pub struct HttpAllowlistScope(pub Vec<Url>);
|
@ -0,0 +1,148 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use serde::{de::Error as DeError, Deserialize, Deserializer};
|
||||
|
||||
/// Allowlist for the shell APIs.
///
/// See more: https://tauri.app/v1/api/config#shellallowlistconfig
#[derive(Debug, Default, PartialEq, Eq, Clone, Deserialize)]
#[serde(rename_all = "camelCase", deny_unknown_fields)]
pub struct Config {
    /// Access scope for the binary execution APIs.
    /// Sidecars are automatically enabled.
    /// Defaults to an empty scope (no commands configured).
    #[serde(default)]
    pub scope: ShellAllowlistScope,
    /// Open URL with the user's default application.
    /// Defaults to disabled.
    #[serde(default)]
    pub open: ShellAllowlistOpen,
}
|
||||
|
||||
/// A command allowed to be executed by the webview API.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct ShellAllowedCommand {
    /// The name for this allowed shell command configuration.
    ///
    /// This name will be used inside of the webview API to call this command along with
    /// any specified arguments.
    pub name: String,

    /// The command name.
    /// It can start with a variable that resolves to a system base directory.
    /// The variables are: `$AUDIO`, `$CACHE`, `$CONFIG`, `$DATA`, `$LOCALDATA`, `$DESKTOP`,
    /// `$DOCUMENT`, `$DOWNLOAD`, `$EXE`, `$FONT`, `$HOME`, `$PICTURE`, `$PUBLIC`, `$RUNTIME`,
    /// `$TEMPLATE`, `$VIDEO`, `$RESOURCE`, `$APP`, `$LOG`, `$TEMP`, `$APPCONFIG`, `$APPDATA`,
    /// `$APPLOCALDATA`, `$APPCACHE`, `$APPLOG`.
    ///
    /// For sidecar commands this may be empty (see the custom `Deserialize` impl,
    /// which defaults it when `sidecar` is set).
    // use default just so the schema doesn't flag it as required
    pub command: PathBuf,

    /// The allowed arguments for the command execution.
    pub args: ShellAllowedArgs,

    /// If this command is a sidecar command.
    pub sidecar: bool,
}
|
||||
|
||||
impl<'de> Deserialize<'de> for ShellAllowedCommand {
    /// Deserializes from an inner representation where the binary path is the
    /// optional `cmd` key, then validates that non-sidecar entries provide it.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Mirror struct used purely for deserialization: `cmd` is optional
        // here so sidecar commands may omit it.
        #[derive(Deserialize)]
        struct InnerShellAllowedCommand {
            name: String,
            #[serde(rename = "cmd")]
            command: Option<PathBuf>,
            #[serde(default)]
            args: ShellAllowedArgs,
            #[serde(default)]
            sidecar: bool,
        }

        let config = InnerShellAllowedCommand::deserialize(deserializer)?;

        // A regular (non-sidecar) command must name the binary to execute.
        if !config.sidecar && config.command.is_none() {
            return Err(DeError::custom(
                "The shell scope `command` value is required.",
            ));
        }

        Ok(ShellAllowedCommand {
            name: config.name,
            // Sidecars fall back to an empty path here — presumably resolved
            // by `name` elsewhere; confirm against the command runner.
            command: config.command.unwrap_or_default(),
            args: config.args,
            sidecar: config.sidecar,
        })
    }
}
|
||||
|
||||
/// A set of command arguments allowed to be executed by the webview API.
///
/// A value of `true` will allow any arguments to be passed to the command. `false` will disable all
/// arguments. A list of [`ShellAllowedArg`] will set those arguments as the only valid arguments to
/// be passed to the attached command configuration.
#[derive(Debug, PartialEq, Eq, Clone, Deserialize)]
#[serde(untagged, deny_unknown_fields)]
#[non_exhaustive]
pub enum ShellAllowedArgs {
    /// Use a simple boolean to allow all or disable all arguments to this command configuration.
    Flag(bool),

    /// A specific set of [`ShellAllowedArg`] that are valid to call for the command configuration.
    List(Vec<ShellAllowedArg>),
}
|
||||
|
||||
impl Default for ShellAllowedArgs {
|
||||
fn default() -> Self {
|
||||
Self::Flag(false)
|
||||
}
|
||||
}
|
||||
|
||||
/// A command argument allowed to be executed by the webview API.
#[derive(Debug, PartialEq, Eq, Clone, Deserialize)]
#[serde(untagged, deny_unknown_fields)]
#[non_exhaustive]
pub enum ShellAllowedArg {
    /// A non-configurable argument that is passed to the command in the order it was specified.
    Fixed(String),

    /// A variable that is set while calling the command from the webview API.
    ///
    /// The provided value must satisfy the `validator` regular expression.
    Var {
        /// [regex] validator to require passed values to conform to an expected input.
        ///
        /// This will require the argument value passed to this variable to match the `validator` regex
        /// before it will be executed.
        ///
        /// [regex]: https://docs.rs/regex/latest/regex/#syntax
        validator: String,
    },
}
|
||||
|
||||
/// Shell scope definition.
/// It is a list of command names and associated CLI arguments that restrict the API access from the webview.
///
/// Defaults to an empty list (no commands allowed).
#[derive(Debug, Default, PartialEq, Eq, Clone, Deserialize)]
pub struct ShellAllowlistScope(pub Vec<ShellAllowedCommand>);
|
||||
|
||||
/// Defines the `shell > open` api scope.
#[derive(Debug, PartialEq, Eq, Clone, Deserialize)]
#[serde(untagged, deny_unknown_fields)]
#[non_exhaustive]
pub enum ShellAllowlistOpen {
    /// If the shell open API should be enabled.
    ///
    /// If enabled, the default validation regex (`^((mailto:\w+)|(tel:\w+)|(https?://\w+)).+`) is used.
    Flag(bool),

    /// Enable the shell open API, with a custom regex that the opened path must match against.
    ///
    /// If using a custom regex to support a non-http(s) schema, care should be used to prevent values
    /// that allow flag-like strings to pass validation. e.g. `--enable-debugging`, `-i`, `/R`.
    Validate(String),
}
|
||||
|
||||
impl Default for ShellAllowlistOpen {
|
||||
fn default() -> Self {
|
||||
Self::Flag(false)
|
||||
}
|
||||
}
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,42 @@
|
||||
use serde::{Deserialize, Deserializer};
|
||||
use url::Url;
|
||||
|
||||
/// Updater configuration.
#[derive(Debug, Clone, Deserialize)]
pub struct Config {
    /// The updater endpoints.
    #[serde(default)]
    pub endpoints: Vec<UpdaterEndpoint>,
    /// Additional arguments given to the NSIS or WiX installer.
    #[serde(default, alias = "installer-args")]
    pub installer_args: Vec<String>,
}
|
||||
|
||||
/// A URL to an updater server.
///
/// The URL must use the `https` scheme on production
/// (enforced by the custom `Deserialize` impl below).
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct UpdaterEndpoint(pub Url);
|
||||
|
||||
impl std::fmt::Display for UpdaterEndpoint {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for UpdaterEndpoint {
    /// Deserializes the URL, rejecting non-`https` endpoints in production builds.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let url = Url::deserialize(deserializer)?;
        // HTTPS is only enforced for release builds without the `schema`
        // feature, so local development and schema generation may use HTTP.
        #[cfg(all(not(debug_assertions), not(feature = "schema")))]
        {
            if url.scheme() != "https" {
                return Err(serde::de::Error::custom(
                    "The configured updater endpoint must use the `https` protocol.",
                ));
            }
        }
        Ok(Self(url))
    }
}
|
@ -0,0 +1,336 @@
|
||||
// Copyright 2019-2023 Tauri Programme within The Commons Conservancy
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
fs,
|
||||
io::{self, Cursor, Read, Seek},
|
||||
path::{self, Path, PathBuf},
|
||||
};
|
||||
|
||||
use crate::{Error, Result};
|
||||
|
||||
/// The archive reader.
#[derive(Debug)]
pub enum ArchiveReader<R: Read + Seek> {
    /// A plain reader.
    Plain(R),
    /// A GZ-compressed reader (decoder).
    GzCompressed(Box<flate2::read::GzDecoder<R>>),
}
|
||||
|
||||
impl<R: Read + Seek> Read for ArchiveReader<R> {
|
||||
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
|
||||
match self {
|
||||
Self::Plain(r) => r.read(buf),
|
||||
Self::GzCompressed(decoder) => decoder.read(buf),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R: Read + Seek> ArchiveReader<R> {
|
||||
#[allow(dead_code)]
|
||||
fn get_mut(&mut self) -> &mut R {
|
||||
match self {
|
||||
Self::Plain(r) => r,
|
||||
Self::GzCompressed(decoder) => decoder.get_mut(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The supported archive formats.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[non_exhaustive]
pub enum ArchiveFormat {
    /// Tar archive, optionally compressed.
    Tar(Option<Compression>),
    /// Zip archive.
    #[allow(dead_code)]
    Zip,
}
|
||||
|
||||
/// The supported compression types.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[non_exhaustive]
pub enum Compression {
    /// Gz compression (e.g. `.tar.gz` archives)
    Gz,
}
|
||||
|
||||
/// The zip entry.
pub struct ZipEntry {
    /// Path of the entry inside the archive.
    path: PathBuf,
    /// Whether the entry is a directory.
    is_dir: bool,
    /// The entry's raw bytes, fully buffered in memory.
    file_contents: Vec<u8>,
}
|
||||
|
||||
/// A read-only view into an entry of an archive.
///
/// Obtained via [`Extract::with_files`].
#[non_exhaustive]
pub enum Entry<'a, R: Read> {
    /// An entry of a tar archive.
    #[non_exhaustive]
    Tar(Box<tar::Entry<'a, R>>),
    /// An entry of a zip archive.
    #[non_exhaustive]
    #[allow(dead_code)]
    Zip(ZipEntry),
}
|
||||
|
||||
impl<'a, R: Read> Entry<'a, R> {
    /// The entry path.
    pub fn path(&self) -> Result<Cow<'_, Path>> {
        match self {
            Self::Tar(e) => e.path().map_err(Into::into),
            Self::Zip(e) => Ok(Cow::Borrowed(&e.path)),
        }
    }

    /// Extract this entry into `into_path`.
    /// If it's a directory, the target will be created, if it's a file, it'll be extracted at this location.
    /// Note: You need to include the complete path, with file name and extension.
    pub fn extract(self, into_path: &path::Path) -> Result<()> {
        match self {
            Self::Tar(mut entry) => {
                // determine if it's a file or a directory
                if entry.header().entry_type() == tar::EntryType::Directory {
                    // this is a directory, lets create it
                    match fs::create_dir_all(into_path) {
                        Ok(_) => (),
                        Err(e) => {
                            // An already-existing directory is fine; anything else aborts.
                            if e.kind() != io::ErrorKind::AlreadyExists {
                                return Err(e.into());
                            }
                        }
                    }
                } else {
                    // Stream the tar entry straight to the destination file.
                    let mut out_file = fs::File::create(into_path)?;
                    io::copy(&mut entry, &mut out_file)?;

                    // make sure we set permissions
                    if let Ok(mode) = entry.header().mode() {
                        set_perms(into_path, Some(&mut out_file), mode, true)?;
                    }
                }
            }
            Self::Zip(entry) => {
                if entry.is_dir {
                    // this is a directory, lets create it
                    match fs::create_dir_all(into_path) {
                        Ok(_) => (),
                        Err(e) => {
                            // An already-existing directory is fine; anything else aborts.
                            if e.kind() != io::ErrorKind::AlreadyExists {
                                return Err(e.into());
                            }
                        }
                    }
                } else {
                    // Zip contents were buffered up front; write them out now.
                    let mut out_file = fs::File::create(into_path)?;
                    io::copy(&mut Cursor::new(entry.file_contents), &mut out_file)?;
                }
            }
        }

        Ok(())
    }
}
|
||||
|
||||
/// The extract manager to retrieve files from archives.
pub struct Extract<'a, R: Read + Seek> {
    /// The (possibly decompressing) reader the archive bytes come from.
    reader: ArchiveReader<R>,
    /// Format of the archive being read.
    archive_format: ArchiveFormat,
    /// Cached tar archive; populated by `with_files` so entries can borrow
    /// the reader for the `'a` lifetime.
    tar_archive: Option<tar::Archive<&'a mut ArchiveReader<R>>>,
}
|
||||
|
||||
impl<'a, R: std::fmt::Debug + Read + Seek> std::fmt::Debug for Extract<'a, R> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("Extract")
|
||||
.field("reader", &self.reader)
|
||||
.field("archive_format", &self.archive_format)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, R: Read + Seek> Extract<'a, R> {
    /// Create archive from reader.
    pub fn from_cursor(mut reader: R, archive_format: ArchiveFormat) -> Extract<'a, R> {
        // Rewind so reading starts at the beginning regardless of prior use;
        // a failed rewind is only reported in debug builds.
        if reader.rewind().is_err() {
            #[cfg(debug_assertions)]
            eprintln!("Could not seek to start of the file");
        }
        // Only tar archives carry a compression hint; zip handles its own.
        let compression = if let ArchiveFormat::Tar(compression) = archive_format {
            compression
        } else {
            None
        };
        Extract {
            reader: match compression {
                Some(Compression::Gz) => {
                    ArchiveReader::GzCompressed(Box::new(flate2::read::GzDecoder::new(reader)))
                }
                _ => ArchiveReader::Plain(reader),
            },
            archive_format,
            tar_archive: None,
        }
    }

    /// Reads the archive content.
    ///
    /// The callback is invoked once per entry; returning `Ok(true)` stops the
    /// iteration early, `Ok(false)` continues with the next entry.
    pub fn with_files<
        E: Into<Error>,
        F: FnMut(Entry<'_, &mut ArchiveReader<R>>) -> std::result::Result<bool, E>,
    >(
        &'a mut self,
        mut f: F,
    ) -> Result<()> {
        match self.archive_format {
            ArchiveFormat::Tar(_) => {
                let archive = tar::Archive::new(&mut self.reader);
                // Stored on self so the entries iterator can borrow it for 'a.
                self.tar_archive.replace(archive);
                for entry in self.tar_archive.as_mut().unwrap().entries()? {
                    let entry = entry?;
                    // Entries with unreadable paths are skipped, not reported.
                    if entry.path().is_ok() {
                        let stop = f(Entry::Tar(Box::new(entry))).map_err(Into::into)?;
                        if stop {
                            break;
                        }
                    }
                }
            }

            ArchiveFormat::Zip => {
                #[cfg(feature = "fs-extract-api")]
                {
                    let mut archive = zip::ZipArchive::new(self.reader.get_mut())?;
                    // Names are collected first because `by_name` needs a
                    // fresh mutable borrow of the archive per entry.
                    let file_names = archive
                        .file_names()
                        .map(|f| f.to_string())
                        .collect::<Vec<String>>();
                    for path in file_names {
                        let mut zip_file = archive.by_name(&path)?;
                        let is_dir = zip_file.is_dir();
                        // Each entry is fully buffered before the callback runs.
                        let mut file_contents = Vec::new();
                        zip_file.read_to_end(&mut file_contents)?;
                        let stop = f(Entry::Zip(ZipEntry {
                            path: path.into(),
                            is_dir,
                            file_contents,
                        }))
                        .map_err(Into::into)?;
                        if stop {
                            break;
                        }
                    }
                }
            }
        }

        Ok(())
    }

    /// Extract an entire source archive into a specified path. If the source is a single compressed
    /// file and not an archive, it will be extracted into a file with the same name inside of
    /// `into_dir`.
    #[allow(dead_code)]
    pub fn extract_into(&mut self, into_dir: &path::Path) -> Result<()> {
        match self.archive_format {
            ArchiveFormat::Tar(_) => {
                let mut archive = tar::Archive::new(&mut self.reader);
                archive.unpack(into_dir)?;
            }

            ArchiveFormat::Zip => {
                #[cfg(feature = "fs-extract-api")]
                {
                    let mut archive = zip::ZipArchive::new(self.reader.get_mut())?;
                    for i in 0..archive.len() {
                        let mut file = archive.by_index(i)?;
                        // Decode the file name from raw bytes instead of using file.name() directly.
                        // file.name() uses String::from_utf8_lossy() which may return messy characters
                        // such as: 爱交易.app/, that does not work as expected.
                        // Here we require the file name must be a valid UTF-8.
                        let file_name = String::from_utf8(file.name_raw().to_vec())?;
                        let out_path = into_dir.join(file_name);
                        if file.is_dir() {
                            fs::create_dir_all(&out_path)?;
                        } else {
                            // Ensure the parent directory exists before writing the file.
                            if let Some(out_path_parent) = out_path.parent() {
                                fs::create_dir_all(out_path_parent)?;
                            }
                            let mut out_file = fs::File::create(&out_path)?;
                            io::copy(&mut file, &mut out_file)?;
                        }
                        // Get and Set permissions
                        #[cfg(unix)]
                        {
                            use std::os::unix::fs::PermissionsExt;
                            if let Some(mode) = file.unix_mode() {
                                fs::set_permissions(&out_path, fs::Permissions::from_mode(mode))?;
                            }
                        }
                    }
                }
            }
        }
        Ok(())
    }
}
|
||||
|
||||
fn set_perms(
|
||||
dst: &Path,
|
||||
f: Option<&mut std::fs::File>,
|
||||
mode: u32,
|
||||
preserve: bool,
|
||||
) -> io::Result<()> {
|
||||
_set_perms(dst, f, mode, preserve).map_err(|_| {
|
||||
io::Error::new(
|
||||
io::ErrorKind::Other,
|
||||
format!(
|
||||
"failed to set permissions to {mode:o} \
|
||||
for `{}`",
|
||||
dst.display()
|
||||
),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
/// Unix implementation: builds a `Permissions` from `mode` and applies it to
/// the open handle when available, otherwise to the path.
#[cfg(unix)]
fn _set_perms(
    dst: &Path,
    f: Option<&mut std::fs::File>,
    mode: u32,
    preserve: bool,
) -> io::Result<()> {
    use std::os::unix::prelude::*;

    // Unless permissions are preserved verbatim, strip everything beyond the rwx bits.
    let effective = if preserve { mode } else { mode & 0o777 };
    let permissions = fs::Permissions::from_mode(effective as _);
    if let Some(file) = f {
        file.set_permissions(permissions)
    } else {
        fs::set_permissions(dst, permissions)
    }
}
|
||||
|
||||
#[cfg(windows)]
fn _set_perms(
    dst: &Path,
    f: Option<&mut std::fs::File>,
    mode: u32,
    _preserve: bool,
) -> io::Result<()> {
    // When the Unix write bit is present the file should remain writable,
    // which is Windows' default state — nothing to change.
    if mode & 0o200 == 0o200 {
        return Ok(());
    }

    // No write bit: mark the file read-only, through the open handle when
    // one is available, otherwise via the path.
    if let Some(file) = f {
        let mut permissions = file.metadata()?.permissions();
        permissions.set_readonly(true);
        file.set_permissions(permissions)
    } else {
        let mut permissions = fs::metadata(dst)?.permissions();
        permissions.set_readonly(true);
        fs::set_permissions(dst, permissions)
    }
}
|
@ -0,0 +1,118 @@
|
||||
// Copyright 2019-2023 Tauri Programme within The Commons Conservancy
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use ignore::WalkBuilder;
|
||||
use std::{fs, path};
|
||||
|
||||
use crate::Result;
|
||||
|
||||
/// Moves a file from the given path to the specified destination.
///
/// `source` and `dest` must be on the same filesystem.
/// If `replace_using_temp` is specified, the destination file will be
/// replaced using the given temporary path.
///
/// * Errors:
///     * Io - copying / renaming
#[derive(Debug)]
pub struct Move<'a> {
    // Path of the file (or directory tree) to move.
    source: &'a path::Path,
    // Optional temporary location used to back up an existing destination
    // before it is replaced, enabling rollback on failure. `None` means a
    // plain rename with no backup.
    temp: Option<&'a path::Path>,
}
|
||||
impl<'a> Move<'a> {
|
||||
/// Specify source file
|
||||
pub fn from_source(source: &'a path::Path) -> Move<'a> {
|
||||
Self { source, temp: None }
|
||||
}
|
||||
|
||||
/// If specified and the destination file already exists, the "destination"
|
||||
/// file will be moved to the given temporary location before the "source"
|
||||
/// file is moved to the "destination" file.
|
||||
///
|
||||
/// In the event of an `io` error while renaming "source" to "destination",
|
||||
/// the temporary file will be moved back to "destination".
|
||||
///
|
||||
/// The `temp` dir must be explicitly provided since `rename` operations require
|
||||
/// files to live on the same filesystem.
|
||||
#[allow(dead_code)]
|
||||
pub fn replace_using_temp(&mut self, temp: &'a path::Path) -> &mut Self {
|
||||
self.temp = Some(temp);
|
||||
self
|
||||
}
|
||||
|
||||
/// Move source file to specified destination (replace whole directory)
|
||||
pub fn to_dest(&self, dest: &path::Path) -> Result<()> {
|
||||
match self.temp {
|
||||
None => {
|
||||
fs::rename(self.source, dest)?;
|
||||
}
|
||||
Some(temp) => {
|
||||
if dest.exists() {
|
||||
fs::rename(dest, temp)?;
|
||||
if let Err(e) = fs::rename(self.source, dest) {
|
||||
fs::rename(temp, dest)?;
|
||||
return Err(e.into());
|
||||
}
|
||||
} else {
|
||||
fs::rename(self.source, dest)?;
|
||||
}
|
||||
}
|
||||
};
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Walk in the source and copy all files and create directories if needed by
|
||||
/// replacing existing elements. (equivalent to a cp -R)
|
||||
#[allow(dead_code)]
|
||||
pub fn walk_to_dest(&self, dest: &path::Path) -> Result<()> {
|
||||
match self.temp {
|
||||
None => {
|
||||
// got no temp -- no need to backup
|
||||
walkdir_and_copy(self.source, dest)?;
|
||||
}
|
||||
Some(temp) => {
|
||||
if dest.exists() {
|
||||
// we got temp and our dest exist, lets make a backup
|
||||
// of current files
|
||||
walkdir_and_copy(dest, temp)?;
|
||||
|
||||
if let Err(e) = walkdir_and_copy(self.source, dest) {
|
||||
// if we got something wrong we reset the dest with our backup
|
||||
fs::rename(temp, dest)?;
|
||||
return Err(e);
|
||||
}
|
||||
} else {
|
||||
// got temp but dest didnt exist
|
||||
walkdir_and_copy(self.source, dest)?;
|
||||
}
|
||||
}
|
||||
};
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
// Walk into the source and create directories, and copy files
|
||||
// Overwriting existing items but keeping untouched the files in the dest
|
||||
// not provided in the source.
|
||||
fn walkdir_and_copy(source: &path::Path, dest: &path::Path) -> Result<()> {
|
||||
let walkdir = WalkBuilder::new(source).hidden(false).build();
|
||||
|
||||
for entry in walkdir {
|
||||
// Check if it's a file
|
||||
|
||||
let element = entry?;
|
||||
let metadata = element.metadata()?;
|
||||
let destination = dest.join(element.path().strip_prefix(source)?);
|
||||
|
||||
// we make sure it's a directory and destination doesnt exist
|
||||
if metadata.is_dir() && !&destination.exists() {
|
||||
fs::create_dir_all(&destination)?;
|
||||
}
|
||||
|
||||
// we make sure it's a file
|
||||
if metadata.is_file() {
|
||||
fs::copy(element.path(), destination)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
Loading…
Reference in new issue