feat(updater): refactor and improvements (#431)
Co-authored-by: Lucas Nogueira <lucas@tauri.studio>
Co-authored-by: Lucas Nogueira <lucas@tauri.app>
parent 84133b57b8
commit 4ab90f048e
@@ -0,0 +1,6 @@
---
"updater": minor
"updater-js": minor
---

The updater plugin is receiving a few changes to improve consistency and ergonomics of the Rust and JS APIs
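To make the API changes concrete, the following is a minimal, hypothetical sketch of the refactored Rust flow visible in this diff: `Updater::check` now returns an `Option<Update>`, and installation goes through `Update::download_and_install` with download callbacks. How the `Updater` instance is obtained from the running app is not part of this hunk, so it is simply passed in here.

```rust
// Hypothetical usage sketch; `updater` is assumed to be a built `Updater`
// (see `UpdaterBuilder::build` in the new updater core below).
async fn update_if_available(updater: Updater) -> Result<()> {
    // `check` now resolves to `Ok(None)` when no update is available.
    if let Some(update) = updater.check().await? {
        update
            .download_and_install(
                // called for every downloaded chunk, with the total size when known
                |chunk_len, content_len| println!("got {chunk_len} bytes of {content_len:?}"),
                // called once the download completes, before installation starts
                || println!("download finished"),
            )
            .await?;
    }
    Ok(())
}
```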
@@ -0,0 +1,54 @@
# Copyright 2019-2023 Tauri Programme within The Commons Conservancy
# SPDX-License-Identifier: Apache-2.0
# SPDX-License-Identifier: MIT

name: integration tests

on:
  push:
    branches:
      - v1
      - v2
    paths:
      - ".github/workflows/integration-tests.yml"
      - "plugins/updater/src/**"
  pull_request:
    branches:
      - v1
      - v2
    paths:
      - ".github/workflows/integration-tests.yml"
      - "plugins/updater/src/**"

jobs:
  run-integration-tests:
    runs-on: ${{ matrix.platform }}

    strategy:
      fail-fast: false
      matrix:
        platform: [ubuntu-latest, macos-latest, windows-latest]

    steps:
      - uses: actions/checkout@v2
        with:
          fetch-depth: 0

      - name: install stable
        uses: actions-rs/toolchain@v1
        with:
          toolchain: stable

      - name: install Linux dependencies
        if: matrix.platform == 'ubuntu-latest'
        run: |
          sudo apt-get update
          sudo apt-get install -y webkit2gtk-4.1 libayatana-appindicator3-dev libfuse2

      - uses: Swatinem/rust-cache@v2

      - name: install Tauri CLI
        run: cargo install tauri-cli --git https://github.com/tauri-apps/tauri --branch dev

      - name: run integration tests
        run: cargo test --test '*' -- --ignored
@@ -1 +1 @@
if("__TAURI__"in window){var __TAURI_UPDATER__=function(e){"use strict";var n=Object.defineProperty,t=(e,n,t)=>{if(!n.has(e))throw TypeError("Cannot "+t)},r=(e,n,r)=>(t(e,n,"read from private field"),r?r.call(e):n.get(e)),i=(e,n,r,i)=>(t(e,n,"write to private field"),i?i.call(e,r):n.set(e,r),r);function a(e,n=!1){let t=window.crypto.getRandomValues(new Uint32Array(1))[0],r=`_${t}`;return Object.defineProperty(window,r,{value:t=>(n&&Reflect.deleteProperty(window,r),e?.(t)),writable:!1,configurable:!0}),t}((e,t)=>{for(var r in t)n(e,r,{get:t[r],enumerable:!0})})({},{Channel:()=>s,PluginListener:()=>l,addPluginListener:()=>c,convertFileSrc:()=>u,invoke:()=>d,transformCallback:()=>a});var o,s=class{constructor(){this.__TAURI_CHANNEL_MARKER__=!0,((e,n,t)=>{if(n.has(e))throw TypeError("Cannot add the same private member more than once");n instanceof WeakSet?n.add(e):n.set(e,t)})(this,o,(()=>{})),this.id=a((e=>{r(this,o).call(this,e)}))}set onmessage(e){i(this,o,e)}get onmessage(){return r(this,o)}toJSON(){return`__CHANNEL__:${this.id}`}};o=new WeakMap;var l=class{constructor(e,n,t){this.plugin=e,this.event=n,this.channelId=t}async unregister(){return d(`plugin:${this.plugin}|remove_listener`,{event:this.event,channelId:this.channelId})}};async function c(e,n,t){let r=new s;return r.onmessage=t,d(`plugin:${e}|register_listener`,{event:n,handler:r}).then((()=>new l(e,n,r.id)))}async function d(e,n={}){return new Promise(((t,r)=>{let i=a((e=>{t(e),Reflect.deleteProperty(window,`_${o}`)}),!0),o=a((e=>{r(e),Reflect.deleteProperty(window,`_${i}`)}),!0);window.__TAURI_IPC__({cmd:e,callback:i,error:o,...n})}))}function u(e,n="asset"){let t=encodeURIComponent(e);return navigator.userAgent.includes("Windows")?`https://${n}.localhost/${t}`:`${n}://localhost/${t}`}class _{constructor(e){this.response=e}async downloadAndInstall(e){const n=new s;return null!=e&&(n.onmessage=e),d("plugin:updater|download_and_install",{onEvent:n})}}return e.Update=_,e.check=async function(e){return d("plugin:updater|check",{...e}).then((e=>new _(e)))},e}({});Object.defineProperty(window.__TAURI__,"updater",{value:__TAURI_UPDATER__})}
if("__TAURI__"in window){var __TAURI_UPDATER__=function(e){"use strict";var n=Object.defineProperty,t=(e,n,t)=>{if(!n.has(e))throw TypeError("Cannot "+t)},r=(e,n,r)=>(t(e,n,"read from private field"),r?r.call(e):n.get(e)),i=(e,n,r,i)=>(t(e,n,"write to private field"),i?i.call(e,r):n.set(e,r),r);function a(e,n=!1){let t=window.crypto.getRandomValues(new Uint32Array(1))[0],r=`_${t}`;return Object.defineProperty(window,r,{value:t=>(n&&Reflect.deleteProperty(window,r),e?.(t)),writable:!1,configurable:!0}),t}((e,t)=>{for(var r in t)n(e,r,{get:t[r],enumerable:!0})})({},{Channel:()=>o,PluginListener:()=>l,addPluginListener:()=>c,convertFileSrc:()=>u,invoke:()=>d,transformCallback:()=>a});var s,o=class{constructor(){this.__TAURI_CHANNEL_MARKER__=!0,((e,n,t)=>{if(n.has(e))throw TypeError("Cannot add the same private member more than once");n instanceof WeakSet?n.add(e):n.set(e,t)})(this,s,(()=>{})),this.id=a((e=>{r(this,s).call(this,e)}))}set onmessage(e){i(this,s,e)}get onmessage(){return r(this,s)}toJSON(){return`__CHANNEL__:${this.id}`}};s=new WeakMap;var l=class{constructor(e,n,t){this.plugin=e,this.event=n,this.channelId=t}async unregister(){return d(`plugin:${this.plugin}|remove_listener`,{event:this.event,channelId:this.channelId})}};async function c(e,n,t){let r=new o;return r.onmessage=t,d(`plugin:${e}|register_listener`,{event:n,handler:r}).then((()=>new l(e,n,r.id)))}async function d(e,n={}){return new Promise(((t,r)=>{let i=a((e=>{t(e),Reflect.deleteProperty(window,`_${s}`)}),!0),s=a((e=>{r(e),Reflect.deleteProperty(window,`_${i}`)}),!0);window.__TAURI_IPC__({cmd:e,callback:i,error:s,...n})}))}function u(e,n="asset"){let t=encodeURIComponent(e);return navigator.userAgent.includes("Windows")?`https://${n}.localhost/${t}`:`${n}://localhost/${t}`}class _{constructor(e){this.currentVersion=e.currentVersion,this.version=e.version,this.date=e.date,this.body=e.body}async downloadAndInstall(e){const n=new o;return null!=e&&(n.onmessage=e),d("plugin:updater|download_and_install",{onEvent:n})}}return e.Update=_,e.check=async function(e){return(null==e?void 0:e.headers)&&(e.headers=Array.from(new Headers(e.headers).entries())),d("plugin:updater|check",{...e}).then((e=>e.available?new _(e):null))},e}({});Object.defineProperty(window.__TAURI__,"updater",{value:__TAURI_UPDATER__})}
@@ -0,0 +1,862 @@
|
||||
// Copyright 2019-2023 Tauri Programme within The Commons Conservancy
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
io::{Cursor, Read},
|
||||
path::{Path, PathBuf},
|
||||
str::FromStr,
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
use base64::Engine;
|
||||
use futures_util::StreamExt;
|
||||
use http::HeaderName;
|
||||
use minisign_verify::{PublicKey, Signature};
|
||||
use reqwest::{
|
||||
header::{HeaderMap, HeaderValue},
|
||||
Client, StatusCode,
|
||||
};
|
||||
use semver::Version;
|
||||
use serde::{de::Error as DeError, Deserialize, Deserializer, Serialize};
|
||||
use tauri::utils::{config::UpdaterConfig, platform::current_exe};
|
||||
use time::OffsetDateTime;
|
||||
use url::Url;
|
||||
|
||||
use crate::error::{Error, Result};
|
||||
|
||||
#[derive(Debug, Deserialize, Serialize, Clone)]
|
||||
pub struct ReleaseManifestPlatform {
|
||||
/// Download URL for the platform
|
||||
pub url: Url,
|
||||
/// Signature for the platform
|
||||
pub signature: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Serialize, Clone)]
|
||||
#[serde(untagged)]
|
||||
pub enum RemoteReleaseInner {
|
||||
Dynamic(ReleaseManifestPlatform),
|
||||
Static {
|
||||
platforms: HashMap<String, ReleaseManifestPlatform>,
|
||||
},
|
||||
}
|
||||
|
||||
/// Information about a release returned by the remote update server.
|
||||
///
|
||||
/// This type can have one of two shapes: Server Format (Dynamic Format) and Static Format.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct RemoteRelease {
|
||||
/// Version to install.
|
||||
pub version: Version,
|
||||
/// Release notes.
|
||||
pub notes: Option<String>,
|
||||
/// Release date.
|
||||
pub pub_date: Option<OffsetDateTime>,
|
||||
/// Release data.
|
||||
pub data: RemoteReleaseInner,
|
||||
}
|
||||
|
||||
impl RemoteRelease {
|
||||
/// The release's download URL for the given target.
|
||||
pub fn download_url(&self, target: &str) -> Result<&Url> {
|
||||
match self.data {
|
||||
RemoteReleaseInner::Dynamic(ref platform) => Ok(&platform.url),
|
||||
RemoteReleaseInner::Static { ref platforms } => platforms
|
||||
.get(target)
|
||||
.map_or(Err(Error::TargetNotFound(target.to_string())), |p| {
|
||||
Ok(&p.url)
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
/// The release's signature for the given target.
|
||||
pub fn signature(&self, target: &str) -> Result<&String> {
|
||||
match self.data {
|
||||
RemoteReleaseInner::Dynamic(ref platform) => Ok(&platform.signature),
|
||||
RemoteReleaseInner::Static { ref platforms } => platforms
|
||||
.get(target)
|
||||
.map_or(Err(Error::TargetNotFound(target.to_string())), |platform| {
|
||||
Ok(&platform.signature)
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
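// NOTE (illustrative, not part of the diff): the two shapes described above can be
// exercised with the custom `Deserialize` impl defined later in this file. URLs, dates
// and signatures below are placeholders.
fn response_shape_examples() -> serde_json::Result<()> {
    // Dynamic ("server") format: the endpoint already resolved the platform.
    let _dynamic: RemoteRelease = serde_json::from_str(
        r#"{ "version": "1.0.1", "url": "https://example.com/app.tar.gz", "signature": "<base64>" }"#,
    )?;

    // Static format: one entry per `{target}-{arch}` key, resolved through
    // `RemoteRelease::download_url` and `RemoteRelease::signature`.
    let _static: RemoteRelease = serde_json::from_str(
        r#"{
            "version": "v1.0.1",
            "notes": "bug fixes",
            "pub_date": "2023-06-01T00:00:00Z",
            "platforms": {
                "linux-x86_64": { "url": "https://example.com/app.AppImage.tar.gz", "signature": "<base64>" }
            }
        }"#,
    )?;

    Ok(())
}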
|
||||
|
||||
pub struct UpdaterBuilder {
|
||||
current_version: Version,
|
||||
config: crate::Config,
|
||||
updater_config: UpdaterConfig,
|
||||
version_comparator: Option<Box<dyn Fn(Version, RemoteRelease) -> bool + Send + Sync>>,
|
||||
executable_path: Option<PathBuf>,
|
||||
target: Option<String>,
|
||||
endpoints: Option<Vec<Url>>,
|
||||
headers: HeaderMap,
|
||||
timeout: Option<Duration>,
|
||||
installer_args: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
impl UpdaterBuilder {
|
||||
pub fn new(
|
||||
current_version: Version,
|
||||
config: crate::Config,
|
||||
updater_config: UpdaterConfig,
|
||||
) -> Self {
|
||||
Self {
|
||||
current_version,
|
||||
config,
|
||||
updater_config,
|
||||
version_comparator: None,
|
||||
executable_path: None,
|
||||
target: None,
|
||||
endpoints: None,
|
||||
headers: Default::default(),
|
||||
timeout: None,
|
||||
installer_args: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn version_comparator<F: Fn(Version, RemoteRelease) -> bool + Send + Sync + 'static>(
|
||||
mut self,
|
||||
f: F,
|
||||
) -> Self {
|
||||
self.version_comparator = Some(Box::new(f));
|
||||
self
|
||||
}
|
||||
|
||||
pub fn target(mut self, target: impl Into<String>) -> Self {
|
||||
self.target.replace(target.into());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn endpoints(mut self, endpoints: Vec<Url>) -> Self {
|
||||
self.endpoints.replace(endpoints);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn executable_path<P: AsRef<Path>>(mut self, p: P) -> Self {
|
||||
self.executable_path.replace(p.as_ref().into());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn header<K, V>(mut self, key: K, value: V) -> Result<Self>
|
||||
where
|
||||
HeaderName: TryFrom<K>,
|
||||
<HeaderName as TryFrom<K>>::Error: Into<http::Error>,
|
||||
HeaderValue: TryFrom<V>,
|
||||
<HeaderValue as TryFrom<V>>::Error: Into<http::Error>,
|
||||
{
|
||||
let key: std::result::Result<HeaderName, http::Error> = key.try_into().map_err(Into::into);
|
||||
let value: std::result::Result<HeaderValue, http::Error> =
|
||||
value.try_into().map_err(Into::into);
|
||||
self.headers.insert(key?, value?);
|
||||
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
pub fn timeout(mut self, timeout: Duration) -> Self {
|
||||
self.timeout = Some(timeout);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn installer_args<I, S>(mut self, args: I) -> Self
|
||||
where
|
||||
I: IntoIterator<Item = S>,
|
||||
S: Into<String>,
|
||||
{
|
||||
self.installer_args
|
||||
.replace(args.into_iter().map(Into::into).collect());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn build(self) -> Result<Updater> {
|
||||
let endpoints = self
|
||||
.endpoints
|
||||
.unwrap_or_else(|| self.config.endpoints.into_iter().map(|e| e.0).collect());
|
||||
|
||||
if endpoints.is_empty() {
|
||||
return Err(Error::EmptyEndpoints);
|
||||
};
|
||||
|
||||
let arch = get_updater_arch().ok_or(Error::UnsupportedArch)?;
|
||||
let (target, json_target) = if let Some(target) = self.target {
|
||||
(target.clone(), target)
|
||||
} else {
|
||||
let target = get_updater_target().ok_or(Error::UnsupportedOs)?;
|
||||
(target.to_string(), format!("{target}-{arch}"))
|
||||
};
|
||||
|
||||
let executable_path = self.executable_path.clone().unwrap_or(current_exe()?);
|
||||
|
||||
// Get the extract_path from the provided executable_path
|
||||
let extract_path = if cfg!(target_os = "linux") {
|
||||
executable_path
|
||||
} else {
|
||||
extract_path_from_executable(&executable_path)?
|
||||
};
|
||||
|
||||
Ok(Updater {
|
||||
config: self.updater_config,
|
||||
current_version: self.current_version,
|
||||
version_comparator: self.version_comparator,
|
||||
timeout: self.timeout,
|
||||
endpoints,
|
||||
installer_args: self.installer_args.unwrap_or(self.config.installer_args),
|
||||
arch,
|
||||
target,
|
||||
json_target,
|
||||
headers: self.headers,
|
||||
extract_path,
|
||||
})
|
||||
}
|
||||
}
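// NOTE (illustrative, not part of the diff): a hedged sketch exercising the builder methods
// above. The endpoint URL, header value and timeout are placeholders; `config` and
// `updater_config` are assumed to be supplied by the plugin setup code.
fn configured_updater(
    current_version: Version,
    config: crate::Config,
    updater_config: UpdaterConfig,
) -> Result<Updater> {
    UpdaterBuilder::new(current_version, config, updater_config)
        .endpoints(vec![
            "https://releases.example.com/{{target}}/{{arch}}/{{current_version}}".parse()?,
        ])
        .header("Authorization", "Bearer <placeholder>")?
        .timeout(Duration::from_secs(30))
        // install only strictly newer versions (this mirrors the default comparison)
        .version_comparator(|current, release| release.version > current)
        .build()
}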
|
||||
|
||||
pub struct Updater {
|
||||
config: UpdaterConfig,
|
||||
current_version: Version,
|
||||
version_comparator: Option<Box<dyn Fn(Version, RemoteRelease) -> bool + Send + Sync>>,
|
||||
timeout: Option<Duration>,
|
||||
endpoints: Vec<Url>,
|
||||
#[allow(dead_code)]
|
||||
installer_args: Vec<String>,
|
||||
arch: &'static str,
|
||||
// The `{{target}}` variable we replace in the endpoint
|
||||
target: String,
|
||||
// The key we look up when the updater server returns a JSON response with the `platforms` object
|
||||
json_target: String,
|
||||
headers: HeaderMap,
|
||||
extract_path: PathBuf,
|
||||
}
|
||||
|
||||
impl Updater {
|
||||
pub async fn check(&self) -> Result<Option<Update>> {
|
||||
// we want JSON only
|
||||
let mut headers = self.headers.clone();
|
||||
headers.insert("Accept", HeaderValue::from_str("application/json").unwrap());
|
||||
|
||||
// Set SSL certs for linux if they aren't available.
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
if std::env::var_os("SSL_CERT_FILE").is_none() {
|
||||
std::env::set_var("SSL_CERT_FILE", "/etc/ssl/certs/ca-certificates.crt");
|
||||
}
|
||||
if std::env::var_os("SSL_CERT_DIR").is_none() {
|
||||
std::env::set_var("SSL_CERT_DIR", "/etc/ssl/certs");
|
||||
}
|
||||
}
|
||||
|
||||
let mut remote_release: Option<RemoteRelease> = None;
|
||||
let mut last_error: Option<Error> = None;
|
||||
for url in &self.endpoints {
|
||||
// Replace {{current_version}}, {{target}} and {{arch}} in the provided URL.
// This is useful if we need to query, for example,
// https://releases.myapp.com/update/{{target}}/{{arch}}/{{current_version}}
// which will be translated into
// https://releases.myapp.com/update/darwin/aarch64/1.0.0
// The main objective is that if the update URL is defined via the Cargo.toml,
// the URL will be generated dynamically.
|
||||
let url: Url = url
|
||||
.to_string()
|
||||
.replace("{{current_version}}", &self.current_version.to_string())
|
||||
.replace("{{target}}", &self.target)
|
||||
.replace("{{arch}}", self.arch)
|
||||
.parse()?;
|
||||
|
||||
let mut request = Client::new().get(url).headers(headers.clone());
|
||||
if let Some(timeout) = self.timeout {
|
||||
request = request.timeout(timeout);
|
||||
}
|
||||
let response = request.send().await;
|
||||
|
||||
if let Ok(res) = response {
|
||||
if res.status().is_success() {
|
||||
// no updates found!
|
||||
if StatusCode::NO_CONTENT == res.status() {
|
||||
return Ok(None);
|
||||
};
|
||||
|
||||
match serde_json::from_value::<RemoteRelease>(res.json().await?)
|
||||
.map_err(Into::into)
|
||||
{
|
||||
Ok(release) => {
|
||||
last_error = None;
|
||||
remote_release = Some(release);
|
||||
// we found a release, break the loop
|
||||
break;
|
||||
}
|
||||
Err(err) => last_error = Some(err),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Last error is cleaned on success.
// Shouldn't be triggered if we had a successful call
|
||||
if let Some(error) = last_error {
|
||||
return Err(error);
|
||||
}
|
||||
|
||||
// Extracted remote metadata
|
||||
let release = remote_release.ok_or(Error::ReleaseNotFound)?;
|
||||
|
||||
let should_update = match self.version_comparator.as_ref() {
|
||||
Some(comparator) => comparator(self.current_version.clone(), release.clone()),
|
||||
None => release.version > self.current_version,
|
||||
};
|
||||
|
||||
let update = if should_update {
|
||||
Some(Update {
|
||||
current_version: self.current_version.to_string(),
|
||||
config: self.config.clone(),
|
||||
target: self.target.clone(),
|
||||
extract_path: self.extract_path.clone(),
|
||||
installer_args: self.installer_args.clone(),
|
||||
version: release.version.to_string(),
|
||||
date: release.pub_date,
|
||||
download_url: release.download_url(&self.json_target)?.to_owned(),
|
||||
body: release.notes.clone(),
|
||||
signature: release.signature(&self.json_target)?.to_owned(),
|
||||
timeout: self.timeout,
|
||||
headers: self.headers.clone(),
|
||||
})
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Ok(update)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Update {
|
||||
config: UpdaterConfig,
|
||||
/// Update description
|
||||
pub body: Option<String>,
|
||||
/// Version used to check for update
|
||||
pub current_version: String,
|
||||
/// Version announced
|
||||
pub version: String,
|
||||
/// Update publish date
|
||||
pub date: Option<OffsetDateTime>,
|
||||
/// Target
|
||||
pub target: String,
|
||||
/// Extract path
|
||||
#[allow(unused)]
|
||||
extract_path: PathBuf,
|
||||
#[allow(unused)]
|
||||
installer_args: Vec<String>,
|
||||
/// Download URL announced
|
||||
pub download_url: Url,
|
||||
/// Signature announced
|
||||
pub signature: String,
|
||||
/// Request timeout
|
||||
pub timeout: Option<Duration>,
|
||||
/// Request headers
|
||||
pub headers: HeaderMap,
|
||||
}
|
||||
|
||||
impl Update {
|
||||
/// Downloads the updater package, verifies it, then returns it as bytes.
|
||||
///
|
||||
/// Use [`Update::install`] to install it
|
||||
pub async fn download<C: Fn(usize, Option<u64>), D: FnOnce()>(
|
||||
&self,
|
||||
on_chunk: C,
|
||||
on_download_finish: D,
|
||||
) -> Result<Vec<u8>> {
|
||||
// set our headers
|
||||
let mut headers = self.headers.clone();
|
||||
headers.insert(
|
||||
"Accept",
|
||||
HeaderValue::from_str("application/octet-stream").unwrap(),
|
||||
);
|
||||
headers.insert(
|
||||
"User-Agent",
|
||||
HeaderValue::from_str("tauri-updater").unwrap(),
|
||||
);
|
||||
|
||||
let mut request = Client::new()
|
||||
.get(self.download_url.clone())
|
||||
.headers(headers);
|
||||
if let Some(timeout) = self.timeout {
|
||||
request = request.timeout(timeout);
|
||||
}
|
||||
let response = request.send().await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(Error::Network(format!(
|
||||
"Download request failed with status: {}",
|
||||
response.status()
|
||||
)));
|
||||
}
|
||||
|
||||
let content_length: Option<u64> = response
|
||||
.headers()
|
||||
.get("Content-Length")
|
||||
.and_then(|value| value.to_str().ok())
|
||||
.and_then(|value| value.parse().ok());
|
||||
|
||||
let mut buffer = Vec::new();
|
||||
|
||||
let mut stream = response.bytes_stream();
|
||||
while let Some(chunk) = stream.next().await {
|
||||
let chunk = chunk?;
|
||||
let bytes = chunk.as_ref().to_vec();
|
||||
on_chunk(bytes.len(), content_length);
|
||||
buffer.extend(bytes);
|
||||
}
|
||||
|
||||
on_download_finish();
|
||||
|
||||
let mut update_buffer = Cursor::new(&buffer);
|
||||
|
||||
verify_signature(&mut update_buffer, &self.signature, &self.config.pubkey)?;
|
||||
|
||||
Ok(buffer)
|
||||
}
|
||||
|
||||
/// Installs the updater package downloaded by [`Update::download`]
|
||||
pub fn install(&self, bytes: Vec<u8>) -> Result<()> {
|
||||
self.install_inner(bytes)
|
||||
}
|
||||
|
||||
/// Downloads and installs the updater package
|
||||
pub async fn download_and_install<C: Fn(usize, Option<u64>), D: FnOnce()>(
|
||||
&self,
|
||||
on_chunk: C,
|
||||
on_download_finish: D,
|
||||
) -> Result<()> {
|
||||
let bytes = self.download(on_chunk, on_download_finish).await?;
|
||||
self.install(bytes)
|
||||
}
|
||||
|
||||
#[cfg(any(target_os = "android", target_os = "ios"))]
|
||||
fn install_inner(&self, bytes: Vec<u8>) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Windows
|
||||
//
|
||||
// ### Expected structure:
|
||||
// ├── [AppName]_[version]_x64.msi.zip # ZIP generated by tauri-bundler
|
||||
// │ └──[AppName]_[version]_x64.msi # Application MSI
|
||||
// ├── [AppName]_[version]_x64-setup.exe.zip # ZIP generated by tauri-bundler
|
||||
// │ └──[AppName]_[version]_x64-setup.exe # NSIS installer
|
||||
// └── ...
|
||||
//
|
||||
// ## MSI
|
||||
// The update server can provide an MSI for Windows (generated with tauri-bundler from *WiX*)
// to replace the current version of the application. In a later version we'll offer
// incremental updates to push specific binaries.
|
||||
//
|
||||
// ## EXE
|
||||
// The update server can provide a custom EXE (installer) that can run any task.
|
||||
#[cfg(windows)]
|
||||
fn install_inner(&self, bytes: Vec<u8>) -> Result<()> {
|
||||
use std::{ffi::OsStr, fs, process::Command};
|
||||
|
||||
// FIXME: We need to create a memory buffer with the MSI and then run it.
|
||||
// (instead of extracting the MSI to a temp path)
|
||||
//
|
||||
// The tricky part is that the MSI needs to be exposed and spawned, so the memory allocation
// shouldn't be dropped; but we should be able to pass the reference so we can drop it once the installation
// is done, otherwise we have a huge memory leak.
|
||||
|
||||
let archive = Cursor::new(bytes);
|
||||
|
||||
let tmp_dir = tempfile::Builder::new().tempdir()?.into_path();
|
||||
|
||||
// extract the buffer to the tmp_dir
|
||||
// we extract our signed archive into our final directory without any temp file
|
||||
let mut extractor = zip::ZipArchive::new(archive)?;
|
||||
|
||||
// extract the msi
|
||||
extractor.extract(&tmp_dir)?;
|
||||
|
||||
let paths = fs::read_dir(&tmp_dir)?;
|
||||
|
||||
let system_root = std::env::var("SYSTEMROOT");
|
||||
let powershell_path = system_root.as_ref().map_or_else(
|
||||
|_| "powershell.exe".to_string(),
|
||||
|p| format!("{p}\\System32\\WindowsPowerShell\\v1.0\\powershell.exe"),
|
||||
);
|
||||
|
||||
for path in paths {
|
||||
let found_path = path?.path();
|
||||
// we support 2 types of files for now: exe & msi
// If it's an `exe` we expect an installer, not a runtime.
|
||||
if found_path.extension() == Some(OsStr::new("exe")) {
|
||||
// we need to wrap the installer path in quotes for Start-Process
|
||||
let mut installer_arg = std::ffi::OsString::new();
|
||||
installer_arg.push("\"");
|
||||
installer_arg.push(&found_path);
|
||||
installer_arg.push("\"");
|
||||
|
||||
// Run the installer
|
||||
Command::new(powershell_path)
|
||||
.args(["-NoProfile", "-WindowStyle", "Hidden"])
|
||||
.args(["Start-Process"])
|
||||
.arg(found_path)
|
||||
.arg("-ArgumentList")
|
||||
.arg(
|
||||
[
|
||||
self.config.windows.install_mode.nsis_args(),
|
||||
self.installer_args
|
||||
.iter()
|
||||
.map(AsRef::as_ref)
|
||||
.collect::<Vec<_>>()
|
||||
.as_slice(),
|
||||
]
|
||||
.concat()
|
||||
.join(", "),
|
||||
)
|
||||
.spawn()
|
||||
.expect("installer failed to start");
|
||||
|
||||
std::process::exit(0);
|
||||
} else if found_path.extension() == Some(OsStr::new("msi")) {
|
||||
// we need to wrap the current exe path in quotes for Start-Process
|
||||
let mut current_exe_arg = std::ffi::OsString::new();
|
||||
current_exe_arg.push("\"");
|
||||
current_exe_arg.push(current_exe()?);
|
||||
current_exe_arg.push("\"");
|
||||
|
||||
let mut msi_path_arg = std::ffi::OsString::new();
|
||||
msi_path_arg.push("\"\"\"");
|
||||
msi_path_arg.push(&found_path);
|
||||
msi_path_arg.push("\"\"\"");
|
||||
|
||||
let msiexec_args = self
|
||||
.config
|
||||
.windows
|
||||
.install_mode
|
||||
.msiexec_args()
|
||||
.iter()
|
||||
.map(|p| p.to_string())
|
||||
.collect::<Vec<String>>();
|
||||
|
||||
// run the installer and relaunch the application
|
||||
let powershell_install_res = Command::new(powershell_path)
|
||||
.args(["-NoProfile", "-WindowStyle", "Hidden"])
|
||||
.args([
|
||||
"Start-Process",
|
||||
"-Wait",
|
||||
"-FilePath",
|
||||
"$env:SYSTEMROOT\\System32\\msiexec.exe",
|
||||
"-ArgumentList",
|
||||
])
|
||||
.arg("/i,")
|
||||
.arg(msi_path_arg)
|
||||
.arg(format!(", {}, /promptrestart;", msiexec_args.join(", ")))
|
||||
.arg("Start-Process")
|
||||
.arg(current_exe_arg)
|
||||
.spawn();
|
||||
if powershell_install_res.is_err() {
|
||||
// fallback to running msiexec directly - relaunch won't be available
|
||||
// we use this here in case powershell fails on an older machine somehow
|
||||
let msiexec_path = system_root.as_ref().map_or_else(
|
||||
|_| "msiexec.exe".to_string(),
|
||||
|p| format!("{p}\\System32\\msiexec.exe"),
|
||||
);
|
||||
let _ = Command::new(msiexec_path)
|
||||
.arg("/i")
|
||||
.arg(found_path)
|
||||
.args(msiexec_args)
|
||||
.arg("/promptrestart")
|
||||
.spawn();
|
||||
}
|
||||
|
||||
std::process::exit(0);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Linux (AppImage)
|
||||
//
|
||||
// ### Expected structure:
|
||||
// ├── [AppName]_[version]_amd64.AppImage.tar.gz # GZ generated by tauri-bundler
|
||||
// │ └──[AppName]_[version]_amd64.AppImage # Application AppImage
|
||||
// └── ...
|
||||
//
|
||||
// We should have an AppImage already installed to be able to copy and install the new one;
// the extract_path is the current AppImage path,
// and tmp_dir is where our new AppImage is found
|
||||
#[cfg(any(
|
||||
target_os = "linux",
|
||||
target_os = "dragonfly",
|
||||
target_os = "freebsd",
|
||||
target_os = "netbsd",
|
||||
target_os = "openbsd"
|
||||
))]
|
||||
fn install_inner(&self, bytes: Vec<u8>) -> Result<()> {
|
||||
use std::{
|
||||
ffi::OsStr,
|
||||
os::unix::fs::{MetadataExt, PermissionsExt},
|
||||
};
|
||||
let archive = Cursor::new(bytes);
|
||||
let extract_path_metadata = self.extract_path.metadata()?;
|
||||
|
||||
let tmp_dir_locations = vec![
|
||||
Box::new(|| Some(std::env::temp_dir())) as Box<dyn FnOnce() -> Option<PathBuf>>,
|
||||
Box::new(dirs_next::cache_dir),
|
||||
Box::new(|| Some(self.extract_path.parent().unwrap().to_path_buf())),
|
||||
];
|
||||
|
||||
for tmp_dir_location in tmp_dir_locations {
|
||||
if let Some(tmp_dir_location) = tmp_dir_location() {
|
||||
let tmp_dir = tempfile::Builder::new()
|
||||
.prefix("tauri_current_app")
|
||||
.tempdir_in(tmp_dir_location)?;
|
||||
let tmp_dir_metadata = tmp_dir.path().metadata()?;
|
||||
|
||||
if extract_path_metadata.dev() == tmp_dir_metadata.dev() {
|
||||
let mut perms = tmp_dir_metadata.permissions();
|
||||
perms.set_mode(0o700);
|
||||
std::fs::set_permissions(tmp_dir.path(), perms)?;
|
||||
|
||||
let tmp_app_image = &tmp_dir.path().join("current_app.AppImage");
|
||||
|
||||
// create a backup of our current app image
|
||||
std::fs::rename(&self.extract_path, tmp_app_image)?;
|
||||
|
||||
// extract the buffer to the tmp_dir
|
||||
// we extract our signed archive into our final directory without any temp file
|
||||
let mut archive = tar::Archive::new(archive);
|
||||
for mut entry in archive.entries()?.flatten() {
|
||||
if let Ok(path) = entry.path() {
|
||||
if path.extension() == Some(OsStr::new("AppImage")) {
|
||||
// if something went wrong during the extraction, we should restore previous app
|
||||
if let Err(err) = entry.unpack(&self.extract_path) {
|
||||
std::fs::rename(tmp_app_image, &self.extract_path)?;
|
||||
return Err(err.into());
|
||||
}
|
||||
// early return, we have everything we need here
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err(Error::TempDirNotOnSameMountPoint)
|
||||
}
|
||||
|
||||
// MacOS
|
||||
//
|
||||
// ### Expected structure:
|
||||
// ├── [AppName]_[version]_x64.app.tar.gz # GZ generated by tauri-bundler
|
||||
// │ └──[AppName].app # Main application
|
||||
// │ └── Contents # Application contents...
|
||||
// │ └── ...
|
||||
// └── ...
|
||||
#[cfg(target_os = "macos")]
|
||||
fn install_inner(&self, bytes: Vec<u8>) -> Result<()> {
|
||||
let archive = Cursor::new(bytes);
|
||||
let mut extracted_files: Vec<PathBuf> = Vec::new();
|
||||
|
||||
// the first file in the tar.gz will always be
|
||||
// <app_name>/Contents
|
||||
let tmp_dir = tempfile::Builder::new()
|
||||
.prefix("tauri_current_app")
|
||||
.tempdir()?;
|
||||
|
||||
// create backup of our current app
|
||||
std::fs::rename(&self.extract_path, tmp_dir.path())?;
|
||||
|
||||
let mut archive = tar::Archive::new(archive);
|
||||
for mut entry in archive.entries()?.flatten() {
|
||||
if let Ok(path) = entry.path() {
|
||||
// skip the first folder (should be the app name)
|
||||
let collected_path: PathBuf = path.iter().skip(1).collect();
|
||||
let extraction_path = &self.extract_path.join(collected_path);
|
||||
|
||||
// if something went wrong during the extraction, we should restore previous app
|
||||
if let Err(err) = entry.unpack(extraction_path) {
|
||||
for file in &extracted_files {
|
||||
// delete all the files we extracted
|
||||
if file.is_dir() {
|
||||
std::fs::remove_dir(file)?;
|
||||
} else {
|
||||
std::fs::remove_file(file)?;
|
||||
}
|
||||
}
|
||||
std::fs::rename(tmp_dir.path(), &self.extract_path)?;
|
||||
return Err(err.into());
|
||||
}
|
||||
|
||||
extracted_files.push(extraction_path.to_path_buf());
|
||||
}
|
||||
}
|
||||
|
||||
let _ = std::process::Command::new("touch")
|
||||
.arg(&self.extract_path)
|
||||
.status();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Gets the target string used on the updater.
|
||||
pub fn target() -> Option<String> {
|
||||
if let (Some(target), Some(arch)) = (get_updater_target(), get_updater_arch()) {
|
||||
Some(format!("{target}-{arch}"))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn get_updater_target() -> Option<&'static str> {
|
||||
if cfg!(target_os = "linux") {
|
||||
Some("linux")
|
||||
} else if cfg!(target_os = "macos") {
|
||||
// TODO shouldn't this be macos instead?
|
||||
Some("darwin")
|
||||
} else if cfg!(target_os = "windows") {
|
||||
Some("windows")
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn get_updater_arch() -> Option<&'static str> {
|
||||
if cfg!(target_arch = "x86") {
|
||||
Some("i686")
|
||||
} else if cfg!(target_arch = "x86_64") {
|
||||
Some("x86_64")
|
||||
} else if cfg!(target_arch = "arm") {
|
||||
Some("armv7")
|
||||
} else if cfg!(target_arch = "aarch64") {
|
||||
Some("aarch64")
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn extract_path_from_executable(executable_path: &Path) -> Result<PathBuf> {
|
||||
// Return the path of the current executable by default
|
||||
// Example C:\Program Files\My App\
|
||||
let extract_path = executable_path
|
||||
.parent()
|
||||
.map(PathBuf::from)
|
||||
.ok_or(Error::FailedToDetermineExtractPath)?;
|
||||
|
||||
// MacOS example binary is in /Applications/TestApp.app/Contents/MacOS/myApp
|
||||
// We need to get /Applications/<app>.app
|
||||
// TODO(lemarier): Need a better way here
|
||||
// Maybe we could search for <*.app> to get the right path
|
||||
#[cfg(target_os = "macos")]
|
||||
if extract_path
|
||||
.display()
|
||||
.to_string()
|
||||
.contains("Contents/MacOS")
|
||||
{
|
||||
return extract_path
|
||||
.parent()
|
||||
.map(PathBuf::from)
|
||||
.ok_or(Error::FailedToDetermineExtractPath)?
|
||||
.parent()
|
||||
.map(PathBuf::from)
|
||||
.ok_or(Error::FailedToDetermineExtractPath);
|
||||
}
|
||||
|
||||
Ok(extract_path)
|
||||
}
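// NOTE (illustrative, not part of the diff): a small check assuming a non-macOS host, so
// only the parent-directory branch applies; on macOS the extra branch also strips the
// `Contents/MacOS` segments.
#[test]
fn extract_path_is_the_executable_parent_dir() {
    let executable = Path::new("/opt/my-app/my-app");
    assert_eq!(
        extract_path_from_executable(executable).unwrap(),
        PathBuf::from("/opt/my-app")
    );
}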
|
||||
|
||||
impl<'de> Deserialize<'de> for RemoteRelease {
|
||||
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
#[derive(Deserialize)]
|
||||
struct InnerRemoteRelease {
|
||||
#[serde(alias = "name", deserialize_with = "parse_version")]
|
||||
version: Version,
|
||||
notes: Option<String>,
|
||||
pub_date: Option<String>,
|
||||
platforms: Option<HashMap<String, ReleaseManifestPlatform>>,
|
||||
// dynamic platform response
|
||||
url: Option<Url>,
|
||||
signature: Option<String>,
|
||||
}
|
||||
|
||||
let release = InnerRemoteRelease::deserialize(deserializer)?;
|
||||
|
||||
let pub_date = if let Some(date) = release.pub_date {
|
||||
Some(
|
||||
OffsetDateTime::parse(&date, &time::format_description::well_known::Rfc3339)
|
||||
.map_err(|e| DeError::custom(format!("invalid value for `pub_date`: {e}")))?,
|
||||
)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
Ok(RemoteRelease {
|
||||
version: release.version,
|
||||
notes: release.notes,
|
||||
pub_date,
|
||||
data: if let Some(platforms) = release.platforms {
|
||||
RemoteReleaseInner::Static { platforms }
|
||||
} else {
|
||||
RemoteReleaseInner::Dynamic(ReleaseManifestPlatform {
|
||||
url: release.url.ok_or_else(|| {
|
||||
DeError::custom("the `url` field was not set on the updater response")
|
||||
})?,
|
||||
signature: release.signature.ok_or_else(|| {
|
||||
DeError::custom("the `signature` field was not set on the updater response")
|
||||
})?,
|
||||
})
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_version<'de, D>(deserializer: D) -> std::result::Result<Version, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
let str = String::deserialize(deserializer)?;
|
||||
|
||||
Version::from_str(str.trim_start_matches('v')).map_err(serde::de::Error::custom)
|
||||
}
|
||||
|
||||
// Validate signature
|
||||
// needs to be public because it's been used
// by our tests in the bundler
|
||||
//
|
||||
// NOTE: The buffer position is not reset.
|
||||
pub fn verify_signature<R>(
|
||||
archive_reader: &mut R,
|
||||
release_signature: &str,
|
||||
pub_key: &str,
|
||||
) -> Result<bool>
|
||||
where
|
||||
R: Read,
|
||||
{
|
||||
// we need to convert the pub key
|
||||
let pub_key_decoded = base64_to_string(pub_key)?;
|
||||
let public_key = PublicKey::decode(&pub_key_decoded)?;
|
||||
let signature_base64_decoded = base64_to_string(release_signature)?;
|
||||
let signature = Signature::decode(&signature_base64_decoded)?;
|
||||
|
||||
// read all bytes until EOF in the buffer
|
||||
let mut data = Vec::new();
|
||||
archive_reader.read_to_end(&mut data)?;
|
||||
|
||||
// Validate signature or bail out
|
||||
public_key.verify(&data, &signature, true)?;
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
fn base64_to_string(base64_string: &str) -> Result<String> {
|
||||
let decoded_string = &base64::engine::general_purpose::STANDARD.decode(base64_string)?;
|
||||
let result = std::str::from_utf8(decoded_string)
|
||||
.map_err(|_| Error::SignatureUtf8(base64_string.into()))?
|
||||
.to_string();
|
||||
Ok(result)
|
||||
}
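// NOTE (illustrative, not part of the diff): `verify_signature` can be driven from an
// in-memory buffer, assuming `signature` and `pubkey` are the base64 strings produced by
// the Tauri signing tooling.
fn verify_bytes(bytes: &[u8], signature: &str, pubkey: &str) -> Result<bool> {
    // `Cursor` provides a `Read` impl over the downloaded bytes, mirroring what
    // `Update::download` does before installing.
    let mut reader = Cursor::new(bytes);
    verify_signature(&mut reader, signature, pubkey)
}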
|
File diff suppressed because it is too large
@@ -1,344 +0,0 @@
|
||||
// Copyright 2019-2023 Tauri Programme within The Commons Conservancy
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
fs,
|
||||
io::{self, Read, Seek},
|
||||
path::{self, Path},
|
||||
};
|
||||
|
||||
use crate::{Error, Result};
|
||||
|
||||
/// The archive reader.
|
||||
#[derive(Debug)]
|
||||
pub enum ArchiveReader<R: Read + Seek> {
|
||||
/// A plain reader.
|
||||
Plain(R),
|
||||
/// A GZ- compressed reader (decoder).
|
||||
GzCompressed(Box<flate2::read::GzDecoder<R>>),
|
||||
}
|
||||
|
||||
impl<R: Read + Seek> Read for ArchiveReader<R> {
|
||||
fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
|
||||
match self {
|
||||
Self::Plain(r) => r.read(buf),
|
||||
Self::GzCompressed(decoder) => decoder.read(buf),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R: Read + Seek> ArchiveReader<R> {
|
||||
#[allow(dead_code)]
|
||||
fn get_mut(&mut self) -> &mut R {
|
||||
match self {
|
||||
Self::Plain(r) => r,
|
||||
Self::GzCompressed(decoder) => decoder.get_mut(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The supported archive formats.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
#[non_exhaustive]
|
||||
pub enum ArchiveFormat {
|
||||
/// Tar archive.
|
||||
Tar(Option<Compression>),
|
||||
/// Zip archive.
|
||||
#[cfg(windows)]
|
||||
Zip,
|
||||
}
|
||||
|
||||
impl ArchiveFormat {
|
||||
fn compression(self) -> Option<Compression> {
|
||||
match self {
|
||||
Self::Tar(c) => c,
|
||||
#[allow(unreachable_patterns)]
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The supported compression types.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
#[non_exhaustive]
|
||||
pub enum Compression {
|
||||
/// Gz compression (e.g. `.tar.gz` archives)
|
||||
Gz,
|
||||
}
|
||||
|
||||
/// The zip entry.
|
||||
#[cfg(windows)]
|
||||
pub struct ZipEntry {
|
||||
path: std::path::PathBuf,
|
||||
is_dir: bool,
|
||||
file_contents: Vec<u8>,
|
||||
}
|
||||
|
||||
/// A read-only view into an entry of an archive.
|
||||
#[non_exhaustive]
|
||||
pub enum Entry<'a, R: Read> {
|
||||
/// An entry of a tar archive.
|
||||
#[non_exhaustive]
|
||||
Tar(Box<tar::Entry<'a, R>>),
|
||||
/// An entry of a zip archive.
|
||||
#[non_exhaustive]
|
||||
#[cfg(windows)]
|
||||
Zip(ZipEntry),
|
||||
}
|
||||
|
||||
impl<'a, R: Read> Entry<'a, R> {
|
||||
/// The entry path.
|
||||
pub fn path(&self) -> Result<Cow<'_, Path>> {
|
||||
match self {
|
||||
Self::Tar(e) => e.path().map_err(Into::into),
|
||||
#[cfg(windows)]
|
||||
Self::Zip(e) => Ok(Cow::Borrowed(&e.path)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Extract this entry into `into_path`.
|
||||
/// If it's a directory, the target will be created, if it's a file, it'll be extracted at this location.
|
||||
/// Note: You need to include the complete path, with file name and extension.
|
||||
pub fn extract(self, into_path: &path::Path) -> Result<()> {
|
||||
match self {
|
||||
Self::Tar(mut entry) => {
|
||||
// determine if it's a file or a directory
|
||||
if entry.header().entry_type() == tar::EntryType::Directory {
|
||||
// this is a directory, lets create it
|
||||
match fs::create_dir_all(into_path) {
|
||||
Ok(_) => (),
|
||||
Err(e) => {
|
||||
if e.kind() != io::ErrorKind::AlreadyExists {
|
||||
return Err(e.into());
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let mut out_file = fs::File::create(into_path)?;
|
||||
io::copy(&mut entry, &mut out_file)?;
|
||||
|
||||
// make sure we set permissions
|
||||
if let Ok(mode) = entry.header().mode() {
|
||||
set_perms(into_path, Some(&mut out_file), mode, true)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
#[cfg(windows)]
|
||||
Self::Zip(entry) => {
|
||||
if entry.is_dir {
|
||||
// this is a directory, lets create it
|
||||
match fs::create_dir_all(into_path) {
|
||||
Ok(_) => (),
|
||||
Err(e) => {
|
||||
if e.kind() != io::ErrorKind::AlreadyExists {
|
||||
return Err(e.into());
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let mut out_file = fs::File::create(into_path)?;
|
||||
io::copy(
|
||||
&mut std::io::Cursor::new(entry.file_contents),
|
||||
&mut out_file,
|
||||
)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// The extract manager to retrieve files from archives.
|
||||
pub struct Extract<'a, R: Read + Seek> {
|
||||
reader: ArchiveReader<R>,
|
||||
archive_format: ArchiveFormat,
|
||||
tar_archive: Option<tar::Archive<&'a mut ArchiveReader<R>>>,
|
||||
}
|
||||
|
||||
impl<'a, R: std::fmt::Debug + Read + Seek> std::fmt::Debug for Extract<'a, R> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("Extract")
|
||||
.field("reader", &self.reader)
|
||||
.field("archive_format", &self.archive_format)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, R: Read + Seek> Extract<'a, R> {
|
||||
/// Create archive from reader.
|
||||
pub fn from_cursor(mut reader: R, archive_format: ArchiveFormat) -> Extract<'a, R> {
|
||||
if reader.rewind().is_err() {
|
||||
#[cfg(debug_assertions)]
|
||||
eprintln!("Could not seek to start of the file");
|
||||
}
|
||||
let compression = archive_format.compression();
|
||||
Extract {
|
||||
reader: match compression {
|
||||
Some(Compression::Gz) => {
|
||||
ArchiveReader::GzCompressed(Box::new(flate2::read::GzDecoder::new(reader)))
|
||||
}
|
||||
_ => ArchiveReader::Plain(reader),
|
||||
},
|
||||
archive_format,
|
||||
tar_archive: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Reads the archive content.
|
||||
pub fn with_files<
|
||||
E: Into<Error>,
|
||||
F: FnMut(Entry<'_, &mut ArchiveReader<R>>) -> std::result::Result<bool, E>,
|
||||
>(
|
||||
&'a mut self,
|
||||
mut f: F,
|
||||
) -> Result<()> {
|
||||
match self.archive_format {
|
||||
ArchiveFormat::Tar(_) => {
|
||||
let archive = tar::Archive::new(&mut self.reader);
|
||||
self.tar_archive.replace(archive);
|
||||
for entry in self.tar_archive.as_mut().unwrap().entries()? {
|
||||
let entry = entry?;
|
||||
if entry.path().is_ok() {
|
||||
let stop = f(Entry::Tar(Box::new(entry))).map_err(Into::into)?;
|
||||
if stop {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
ArchiveFormat::Zip => {
|
||||
let mut archive = zip::ZipArchive::new(self.reader.get_mut())?;
|
||||
let file_names = archive
|
||||
.file_names()
|
||||
.map(|f| f.to_string())
|
||||
.collect::<Vec<String>>();
|
||||
for path in file_names {
|
||||
let mut zip_file = archive.by_name(&path)?;
|
||||
let is_dir = zip_file.is_dir();
|
||||
let mut file_contents = Vec::new();
|
||||
zip_file.read_to_end(&mut file_contents)?;
|
||||
let stop = f(Entry::Zip(ZipEntry {
|
||||
path: path.into(),
|
||||
is_dir,
|
||||
file_contents,
|
||||
}))
|
||||
.map_err(Into::into)?;
|
||||
if stop {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Extract an entire source archive into a specified path. If the source is a single compressed
|
||||
/// file and not an archive, it will be extracted into a file with the same name inside of
|
||||
/// `into_dir`.
|
||||
#[allow(dead_code)]
|
||||
pub fn extract_into(&mut self, into_dir: &path::Path) -> Result<()> {
|
||||
match self.archive_format {
|
||||
ArchiveFormat::Tar(_) => {
|
||||
let mut archive = tar::Archive::new(&mut self.reader);
|
||||
archive.unpack(into_dir)?;
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
ArchiveFormat::Zip => {
|
||||
let mut archive = zip::ZipArchive::new(self.reader.get_mut())?;
|
||||
for i in 0..archive.len() {
|
||||
let mut file = archive.by_index(i)?;
|
||||
// Decode the file name from raw bytes instead of using file.name() directly.
|
||||
// file.name() uses String::from_utf8_lossy() which may return messy characters
|
||||
// such as: 爱交易.app/, that does not work as expected.
|
||||
// Here we require the file name must be a valid UTF-8.
|
||||
let file_name = String::from_utf8(file.name_raw().to_vec())?;
|
||||
let out_path = into_dir.join(file_name);
|
||||
if file.is_dir() {
|
||||
fs::create_dir_all(&out_path)?;
|
||||
} else {
|
||||
if let Some(out_path_parent) = out_path.parent() {
|
||||
fs::create_dir_all(out_path_parent)?;
|
||||
}
|
||||
let mut out_file = fs::File::create(&out_path)?;
|
||||
io::copy(&mut file, &mut out_file)?;
|
||||
}
|
||||
// Get and Set permissions
|
||||
#[cfg(unix)]
|
||||
{
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
if let Some(mode) = file.unix_mode() {
|
||||
fs::set_permissions(&out_path, fs::Permissions::from_mode(mode))?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn set_perms(
|
||||
dst: &Path,
|
||||
f: Option<&mut std::fs::File>,
|
||||
mode: u32,
|
||||
preserve: bool,
|
||||
) -> io::Result<()> {
|
||||
_set_perms(dst, f, mode, preserve).map_err(|_| {
|
||||
io::Error::new(
|
||||
io::ErrorKind::Other,
|
||||
format!(
|
||||
"failed to set permissions to {mode:o} \
|
||||
for `{}`",
|
||||
dst.display()
|
||||
),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn _set_perms(
|
||||
dst: &Path,
|
||||
f: Option<&mut std::fs::File>,
|
||||
mode: u32,
|
||||
preserve: bool,
|
||||
) -> io::Result<()> {
|
||||
use std::os::unix::prelude::*;
|
||||
|
||||
let mode = if preserve { mode } else { mode & 0o777 };
|
||||
let perm = fs::Permissions::from_mode(mode as _);
|
||||
match f {
|
||||
Some(f) => f.set_permissions(perm),
|
||||
None => fs::set_permissions(dst, perm),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn _set_perms(
|
||||
dst: &Path,
|
||||
f: Option<&mut std::fs::File>,
|
||||
mode: u32,
|
||||
_preserve: bool,
|
||||
) -> io::Result<()> {
|
||||
if mode & 0o200 == 0o200 {
|
||||
return Ok(());
|
||||
}
|
||||
match f {
|
||||
Some(f) => {
|
||||
let mut perm = f.metadata()?.permissions();
|
||||
perm.set_readonly(true);
|
||||
f.set_permissions(perm)
|
||||
}
|
||||
None => {
|
||||
let mut perm = fs::metadata(dst)?.permissions();
|
||||
perm.set_readonly(true);
|
||||
fs::set_permissions(dst, perm)
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,308 +0,0 @@
|
||||
// Copyright 2019-2023 Tauri Programme within The Commons Conservancy
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
//! The Tauri updater.
|
||||
//!
|
||||
//! The updater is focused on making Tauri's application updates **as safe and transparent as updates to a website**.
|
||||
//!
|
||||
//! For a full guide on setting up the updater, see <https://tauri.app/v1/guides/distribution/updater>.
|
||||
//!
|
||||
//! Check [`UpdateBuilder`] to see how to trigger and customize the updater at runtime.
|
||||
//! ```
|
||||
|
||||
mod core;
|
||||
mod extract;
|
||||
mod move_file;
|
||||
|
||||
use std::time::Duration;
|
||||
|
||||
use http::header::{HeaderName, HeaderValue};
|
||||
use semver::Version;
|
||||
use time::OffsetDateTime;
|
||||
|
||||
pub use self::core::{DownloadEvent, RemoteRelease};
|
||||
|
||||
use tauri::{AppHandle, Manager, Runtime};
|
||||
|
||||
use crate::{Result, UpdaterState};
|
||||
|
||||
/// Gets the target string used on the updater.
|
||||
pub fn target() -> Option<String> {
|
||||
if let (Some(target), Some(arch)) = (core::get_updater_target(), core::get_updater_arch()) {
|
||||
Some(format!("{target}-{arch}"))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, serde::Serialize)]
|
||||
struct StatusEvent {
|
||||
status: String,
|
||||
error: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, serde::Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct DownloadProgressEvent {
|
||||
chunk_length: usize,
|
||||
content_length: Option<u64>,
|
||||
}
|
||||
|
||||
#[derive(Clone, serde::Serialize)]
|
||||
struct UpdateManifest {
|
||||
version: String,
|
||||
date: Option<String>,
|
||||
body: String,
|
||||
}
|
||||
|
||||
/// An update check builder.
|
||||
#[derive(Debug)]
|
||||
pub struct UpdateBuilder<R: Runtime> {
|
||||
inner: core::UpdateBuilder<R>,
|
||||
}
|
||||
|
||||
impl<R: Runtime> UpdateBuilder<R> {
|
||||
/// Sets the current platform's target name for the updater.
|
||||
///
|
||||
/// The target is injected in the endpoint URL by replacing `{{target}}`.
|
||||
/// Note that this does not affect the `{{arch}}` variable.
|
||||
///
|
||||
/// If the updater response JSON includes the `platforms` field,
|
||||
/// that object must contain a value for the target key.
|
||||
///
|
||||
/// By default Tauri uses `$OS_NAME` as the replacement for `{{target}}`
|
||||
/// and `$OS_NAME-$ARCH` as the key in the `platforms` object,
|
||||
/// where `$OS_NAME` is the current operating system name "linux", "windows" or "darwin")
|
||||
/// and `$ARCH` is one of the supported architectures ("i686", "x86_64", "armv7" or "aarch64").
|
||||
///
|
||||
/// See [`Builder::updater_target`](crate::Builder#method.updater_target) for a way to set the target globally.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ## Use a macOS Universal binary target name
|
||||
///
|
||||
/// In this example, we set the updater target only on macOS.
|
||||
/// On other platforms, we set the default target.
|
||||
/// Note that `{{target}}` will be replaced with `darwin-universal`,
|
||||
/// but `{{arch}}` is still the running platform's architecture.
|
||||
///
|
||||
/// ```no_run
|
||||
/// use tauri_plugin_updater::{target as updater_target, UpdaterExt};
|
||||
/// tauri::Builder::default()
|
||||
/// .setup(|app| {
|
||||
/// let handle = app.handle();
|
||||
/// tauri::async_runtime::spawn(async move {
|
||||
/// let builder = handle.updater().target(if cfg!(target_os = "macos") {
|
||||
/// "darwin-universal".to_string()
|
||||
/// } else {
|
||||
/// updater_target().unwrap()
|
||||
/// });
|
||||
/// match builder.check().await {
|
||||
/// Ok(update) => {}
|
||||
/// Err(error) => {}
|
||||
/// }
|
||||
/// });
|
||||
/// Ok(())
|
||||
/// });
|
||||
/// ```
|
||||
///
|
||||
/// ## Append debug information to the target
|
||||
///
|
||||
/// This allows you to provide updates for both debug and release applications.
|
||||
///
|
||||
/// ```no_run
|
||||
/// use tauri_plugin_updater::{UpdaterExt, target as updater_target};
|
||||
/// tauri::Builder::default()
|
||||
/// .setup(|app| {
|
||||
/// let handle = app.handle();
|
||||
/// tauri::async_runtime::spawn(async move {
|
||||
/// let kind = if cfg!(debug_assertions) { "debug" } else { "release" };
|
||||
/// let builder = handle.updater().target(format!("{}-{kind}", updater_target().unwrap()));
|
||||
/// match builder.check().await {
|
||||
/// Ok(update) => {}
|
||||
/// Err(error) => {}
|
||||
/// }
|
||||
/// });
|
||||
/// Ok(())
|
||||
/// });
|
||||
/// ```
|
||||
///
|
||||
/// ## Use the platform's target triple
|
||||
///
|
||||
/// ```no_run
|
||||
/// use tauri_plugin_updater::UpdaterExt;
|
||||
/// tauri::Builder::default()
|
||||
/// .setup(|app| {
|
||||
/// let handle = app.handle();
|
||||
/// tauri::async_runtime::spawn(async move {
|
||||
/// let builder = handle.updater().target(tauri::utils::platform::target_triple().unwrap());
|
||||
/// match builder.check().await {
|
||||
/// Ok(update) => {}
|
||||
/// Err(error) => {}
|
||||
/// }
|
||||
/// });
|
||||
/// Ok(())
|
||||
/// });
|
||||
/// ```
|
||||
pub fn target(mut self, target: impl Into<String>) -> Self {
|
||||
self.inner = self.inner.target(target);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets a closure that is invoked to compare the current version and the latest version returned by the updater server.
|
||||
/// The first argument is the current version, and the second one is the latest version.
|
||||
///
|
||||
/// The closure must return `true` if the update should be installed.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// - Always install the version returned by the server:
|
||||
///
|
||||
/// ```no_run
|
||||
/// use tauri_plugin_updater::UpdaterExt;
|
||||
/// tauri::Builder::default()
|
||||
/// .setup(|app| {
|
||||
/// app.handle().updater().should_install(|_current, _latest| true);
|
||||
/// Ok(())
|
||||
/// });
|
||||
/// ```
|
||||
pub fn should_install<F: FnOnce(&Version, &RemoteRelease) -> bool + Send + 'static>(
|
||||
mut self,
|
||||
f: F,
|
||||
) -> Self {
|
||||
self.inner = self.inner.should_install(f);
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets the timeout for the requests to the updater endpoints.
|
||||
pub fn timeout(mut self, timeout: Duration) -> Self {
|
||||
self.inner = self.inner.timeout(timeout);
|
||||
self
|
||||
}
|
||||
|
||||
/// Add a `Header` to the request.
|
||||
pub fn header<K, V>(mut self, key: K, value: V) -> Result<Self>
|
||||
where
|
||||
HeaderName: TryFrom<K>,
|
||||
<HeaderName as TryFrom<K>>::Error: Into<http::Error>,
|
||||
HeaderValue: TryFrom<V>,
|
||||
<HeaderValue as TryFrom<V>>::Error: Into<http::Error>,
|
||||
{
|
||||
self.inner = self.inner.header(key, value)?;
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
/// Check if an update is available.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```no_run
|
||||
/// use tauri_plugin_updater::{UpdaterExt, DownloadEvent};
|
||||
/// tauri::Builder::default()
|
||||
/// .setup(|app| {
|
||||
/// let handle = app.handle();
|
||||
/// tauri::async_runtime::spawn(async move {
|
||||
/// match handle.updater().check().await {
|
||||
/// Ok(update) => {
|
||||
/// if update.is_update_available() {
|
||||
/// update.download_and_install(|event| {
|
||||
/// match event {
|
||||
/// DownloadEvent::Started { content_length } => println!("started! size: {:?}", content_length),
|
||||
/// DownloadEvent::Progress { chunk_length } => println!("Downloaded {chunk_length} bytes"),
|
||||
/// DownloadEvent::Finished => println!("download finished"),
|
||||
/// }
|
||||
/// }).await.unwrap();
|
||||
/// }
|
||||
/// }
|
||||
/// Err(e) => {
|
||||
/// println!("failed to get update: {}", e);
|
||||
/// }
|
||||
/// }
|
||||
/// });
|
||||
/// Ok(())
|
||||
/// });
|
||||
/// ```
|
||||
pub async fn check(self) -> Result<UpdateResponse<R>> {
|
||||
self.inner
|
||||
.build()
|
||||
.await
|
||||
.map(|update| UpdateResponse { update })
|
||||
}
|
||||
}
|
||||
|
||||
/// The response of an updater check.
|
||||
pub struct UpdateResponse<R: Runtime> {
|
||||
update: core::Update<R>,
|
||||
}
|
||||
|
||||
impl<R: Runtime> Clone for UpdateResponse<R> {
|
||||
fn clone(&self) -> Self {
|
||||
Self {
|
||||
update: self.update.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<R: Runtime> UpdateResponse<R> {
|
||||
/// Whether the updater found a newer release or not.
|
||||
pub fn is_update_available(&self) -> bool {
|
||||
self.update.should_update
|
||||
}
|
||||
|
||||
/// The current version of the application as read by the updater.
|
||||
pub fn current_version(&self) -> &Version {
|
||||
&self.update.current_version
|
||||
}
|
||||
|
||||
/// The latest version of the application found by the updater.
|
||||
pub fn latest_version(&self) -> &str {
|
||||
&self.update.version
|
||||
}
|
||||
|
||||
/// The update date.
|
||||
pub fn date(&self) -> Option<&OffsetDateTime> {
|
||||
self.update.date.as_ref()
|
||||
}
|
||||
|
||||
/// The update description.
|
||||
pub fn body(&self) -> Option<&String> {
|
||||
self.update.body.as_ref()
|
||||
}
|
||||
|
||||
/// Downloads and installs the update.
|
||||
pub async fn download_and_install<F: Fn(DownloadEvent)>(&self, on_event: F) -> Result<()> {
|
||||
// Launch updater download process
|
||||
// macOS we display the `Ready to restart dialog` asking to restart
|
||||
// Windows is closing the current App and launch the downloaded MSI when ready (the process stop here)
|
||||
// Linux we replace the AppImage by launching a new install, it start a new AppImage instance, so we're closing the previous. (the process stop here)
|
||||
self.update
|
||||
.download_and_install(
|
||||
self.update.app.config().tauri.bundle.updater.pubkey.clone(),
|
||||
on_event,
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
/// Initializes the [`UpdateBuilder`] using the app configuration.
|
||||
pub fn builder<R: Runtime>(handle: AppHandle<R>) -> UpdateBuilder<R> {
|
||||
let package_info = handle.package_info().clone();
|
||||
|
||||
// prepare our endpoints
|
||||
let endpoints = handle
|
||||
.state::<UpdaterState>()
|
||||
.config
|
||||
.endpoints
|
||||
.iter()
|
||||
.map(|e| e.to_string())
|
||||
.collect::<Vec<String>>();
|
||||
|
||||
let mut builder = self::core::builder(handle.clone())
|
||||
.urls(&endpoints[..])
|
||||
.current_version(package_info.version);
|
||||
if let Some(target) = &handle.state::<crate::UpdaterState>().target {
|
||||
builder = builder.target(target);
|
||||
}
|
||||
UpdateBuilder { inner: builder }
|
||||
}
|
@@ -1,118 +0,0 @@
|
||||
// Copyright 2019-2023 Tauri Programme within The Commons Conservancy
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
use ignore::WalkBuilder;
|
||||
use std::{fs, path};
|
||||
|
||||
use crate::Result;
|
||||
|
||||
/// Moves a file from the given path to the specified destination.
|
||||
///
|
||||
/// `source` and `dest` must be on the same filesystem.
|
||||
/// If `replace_using_temp` is specified, the destination file will be
|
||||
/// replaced using the given temporary path.
|
||||
///
|
||||
/// * Errors:
|
||||
/// * Io - copying / renaming
|
||||
#[derive(Debug)]
|
||||
pub struct Move<'a> {
|
||||
source: &'a path::Path,
|
||||
temp: Option<&'a path::Path>,
|
||||
}
|
||||
impl<'a> Move<'a> {
|
||||
/// Specify source file
|
||||
pub fn from_source(source: &'a path::Path) -> Move<'a> {
|
||||
Self { source, temp: None }
|
||||
}
|
||||
|
||||
/// If specified and the destination file already exists, the "destination"
|
||||
/// file will be moved to the given temporary location before the "source"
|
||||
/// file is moved to the "destination" file.
|
||||
///
|
||||
/// In the event of an `io` error while renaming "source" to "destination",
|
||||
/// the temporary file will be moved back to "destination".
|
||||
///
|
||||
/// The `temp` dir must be explicitly provided since `rename` operations require
|
||||
/// files to live on the same filesystem.
|
||||
#[allow(dead_code)]
|
||||
pub fn replace_using_temp(&mut self, temp: &'a path::Path) -> &mut Self {
|
||||
self.temp = Some(temp);
|
||||
self
|
||||
}
|
||||
|
||||
/// Move source file to specified destination (replace whole directory)
|
||||
pub fn to_dest(&self, dest: &path::Path) -> Result<()> {
|
||||
match self.temp {
|
||||
None => {
|
||||
fs::rename(self.source, dest)?;
|
||||
}
|
||||
Some(temp) => {
|
||||
if dest.exists() {
|
||||
fs::rename(dest, temp)?;
|
||||
if let Err(e) = fs::rename(self.source, dest) {
|
||||
fs::rename(temp, dest)?;
|
||||
return Err(e.into());
|
||||
}
|
||||
} else {
|
||||
fs::rename(self.source, dest)?;
|
||||
}
|
||||
}
|
||||
};
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Walk in the source and copy all files and create directories if needed by
|
||||
/// replacing existing elements. (equivalent to a cp -R)
|
||||
#[allow(dead_code)]
|
||||
pub fn walk_to_dest(&self, dest: &path::Path) -> Result<()> {
|
||||
match self.temp {
|
||||
None => {
|
||||
// got no temp -- no need to backup
|
||||
walkdir_and_copy(self.source, dest)?;
|
||||
}
|
||||
Some(temp) => {
|
||||
if dest.exists() {
|
||||
// we got temp and our dest exist, lets make a backup
|
||||
// of current files
|
||||
walkdir_and_copy(dest, temp)?;
|
||||
|
||||
if let Err(e) = walkdir_and_copy(self.source, dest) {
|
||||
// if we got something wrong we reset the dest with our backup
|
||||
fs::rename(temp, dest)?;
|
||||
return Err(e);
|
||||
}
|
||||
} else {
|
||||
// got temp but dest didnt exist
|
||||
walkdir_and_copy(self.source, dest)?;
|
||||
}
|
||||
}
|
||||
};
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
// Walk into the source and create directories, and copy files
|
||||
// Overwriting existing items but keeping untouched the files in the dest
|
||||
// not provided in the source.
|
||||
fn walkdir_and_copy(source: &path::Path, dest: &path::Path) -> Result<()> {
|
||||
let walkdir = WalkBuilder::new(source).hidden(false).build();
|
||||
|
||||
for entry in walkdir {
|
||||
// Check if it's a file
|
||||
|
||||
let element = entry?;
|
||||
let metadata = element.metadata()?;
|
||||
let destination = dest.join(element.path().strip_prefix(source)?);
|
||||
|
||||
// we make sure it's a directory and destination doesnt exist
|
||||
if metadata.is_dir() && !&destination.exists() {
|
||||
fs::create_dir_all(&destination)?;
|
||||
}
|
||||
|
||||
// we make sure it's a file
|
||||
if metadata.is_file() {
|
||||
fs::copy(element.path(), destination)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|