#![allow(non_snake_case, non_upper_case_globals)]
/// Describes one build platform for which sidecar binaries are vendored.
#[derive(Clone, Debug)]
struct PlatformTarget {
	/// Platform identifier as it appears in upstream download URLs (e.g. "win-x64").
	DownloadIdentifier:String,

	/// Archive format for this platform's downloads: "zip" or "tar.gz".
	ArchiveExtension:String,

	/// Rust/Tauri target triple; also used as the on-disk directory name.
	TauriTargetTriple:String,
}
45
/// Supported archive container formats for downloaded sidecar bundles.
#[derive(Clone, Debug, PartialEq)]
enum ArchiveType {
	/// A `.zip` archive (used for the Windows platform entry).
	Zip,

	/// A gzip-compressed tarball (used for the Linux and macOS entries).
	TarGz,
}
54
/// One entry from the Node.js release index JSON.
/// Only the `version` field (e.g. "v20.11.1") is deserialized; other keys are ignored.
#[derive(Deserialize, Debug)]
struct NodeVersionInfo {
	// Lowercase on purpose: must match the JSON key exactly.
	version:String,
}
61
/// Everything a single download job needs: what to fetch, where to stage it,
/// and where to install it. Owned data only, so it can move into a spawned task.
#[derive(Clone, Debug)]
struct DownloadTask {
	/// Sidecar kind, e.g. "NODE".
	SidecarName:String,

	/// Requested major version (e.g. "20"); one component of the cache key.
	MajorVersion:String,

	/// Fully resolved version (e.g. "v20.11.1") recorded in the cache on success.
	FullVersion:String,

	/// Direct URL of the archive to download.
	DownloadURL:String,

	/// Directory under which a per-task temporary directory is created.
	TempParentDirectory:PathBuf,

	/// Final install location; replaced wholesale when updating.
	DestinationDirectory:PathBuf,

	/// Container format of the downloaded archive.
	ArchiveType:ArchiveType,

	/// Name of the top-level folder expected inside the extracted archive.
	ExtractedFolderName:String,

	/// Target triple this task installs for; used in log output and the cache key.
	TauriTargetTriple:String,
}
94
/// On-disk record of which sidecar versions are already installed.
/// Persisted as JSON so that unchanged versions can be skipped on later runs.
#[derive(Serialize, Deserialize, Debug, Default)]
struct DownloadCache {
	/// Maps "<target-triple>/<sidecar>/<major>" to the installed full version.
	Entries:HashMap<String, String>,
}
106
107impl DownloadCache {
108 fn Load(CachePath:&Path) -> Self {
111 if !CachePath.exists() {
112 info!("Cache file not found. A new one will be created.");
113
114 return DownloadCache::default();
115 }
116
117 let FileContents = match fs::read_to_string(CachePath) {
118 Ok(Contents) => Contents,
119
120 Err(Error) => {
121 warn!("Failed to read cache file: {}. Starting with an empty cache.", Error);
122
123 return DownloadCache::default();
124 },
125 };
126
127 match serde_json::from_str(&FileContents) {
128 Ok(Cache) => {
129 info!("Successfully loaded download cache.");
130
131 Cache
132 },
133
134 Err(Error) => {
135 warn!("Failed to parse cache file: {}. Starting with an empty cache.", Error);
136
137 DownloadCache::default()
138 },
139 }
140 }
141
142 fn Save(&self, CachePath:&Path) -> Result<()> {
146 let SortedEntries:BTreeMap<_, _> = self.Entries.iter().collect();
148
149 let CacheToSerialize = serde_json::json!({
151 "Entries": SortedEntries
152 });
153
154 let mut Buffer = Vec::new();
156
157 let Formatter = serde_json::ser::PrettyFormatter::with_indent(b"\t");
159
160 let mut Serializer = serde_json::Serializer::with_formatter(&mut Buffer, Formatter);
162
163 CacheToSerialize.serialize(&mut Serializer)?;
165
166 fs::write(CachePath, &Buffer)
168 .with_context(|| format!("Failed to write tab-formatted cache to {:?}", CachePath))?;
169
170 Ok(())
171 }
172}
173
174fn GetBaseSidecarDirectory() -> Result<PathBuf> {
182 let CurrentExePath = env::current_exe().context("Failed to get the path of the current executable.")?;
184
185 let BaseDirectory = CurrentExePath
189 .parent()
190 .and_then(|p| p.parent())
191 .and_then(|p| p.parent())
192 .context(
193 "Could not determine the base sidecar directory. Expected to be run from a subdirectory like \
194 'Target/release' within the sidecar project.",
195 )?;
196
197 Ok(BaseDirectory.to_path_buf())
198}
199
200fn GetPlatformMatrix() -> Vec<PlatformTarget> {
203 vec![
204 PlatformTarget {
205 DownloadIdentifier:"win-x64".to_string(),
206
207 ArchiveExtension:"zip".to_string(),
208
209 TauriTargetTriple:"x86_64-pc-windows-msvc".to_string(),
210 },
211 PlatformTarget {
212 DownloadIdentifier:"linux-x64".to_string(),
213
214 ArchiveExtension:"tar.gz".to_string(),
215
216 TauriTargetTriple:"x86_64-unknown-linux-gnu".to_string(),
217 },
218 PlatformTarget {
219 DownloadIdentifier:"linux-arm64".to_string(),
220
221 ArchiveExtension:"tar.gz".to_string(),
222
223 TauriTargetTriple:"aarch64-unknown-linux-gnu".to_string(),
224 },
225 PlatformTarget {
226 DownloadIdentifier:"darwin-x64".to_string(),
227
228 ArchiveExtension:"tar.gz".to_string(),
229
230 TauriTargetTriple:"x86_64-apple-darwin".to_string(),
231 },
232 PlatformTarget {
233 DownloadIdentifier:"darwin-arm64".to_string(),
234
235 ArchiveExtension:"tar.gz".to_string(),
236
237 TauriTargetTriple:"aarch64-apple-darwin".to_string(),
238 },
239 ]
240}
241
/// Maps each sidecar name to the list of major versions that should be vendored.
/// Currently only Node.js ("NODE") is configured.
fn GetSidecarsToFetch() -> HashMap<String, Vec<String>> {
	let NodeMajorVersions:Vec<String> = ["24", "23", "22", "21", "20", "19", "18", "17", "16"]
		.iter()
		.map(|Version| Version.to_string())
		.collect();

	HashMap::from([("NODE".to_string(), NodeMajorVersions)])
}
257
/// Name of the environment variable read to configure the logger's level filter.
pub const LogEnv:&str = "RUST_LOG";
262
/// Ensures the repository's `.gitattributes` tracks vendored binaries with Git LFS.
///
/// When the file is missing it is created with a full explanatory header and all
/// rules; otherwise only the rules not already present are appended, so
/// hand-written edits to the file are preserved.
fn UpdateGitattributes(BaseDirectory:&Path) -> Result<()> {
	// Banner written once, when the file is created from scratch.
	const GITATTRIBUTES_HEADER:&str = r#"################################################################################
# Git LFS configuration for vendored Tauri Sidecars
#
# This file tells Git to use LFS (Large File Storage) for the heavy binary
# files and modules downloaded by the sidecar vendoring script. This keeps the
# main repository history small and fast.
#
# The `-text` attribute is used to prevent Git from normalizing line endings,
# which is critical for binary files and scripts.
#
# This file is automatically managed by the sidecar vendor script.
################################################################################

# --- Rule Definitions ---"#;

	// LFS patterns for vendored Node binaries plus this project's own build
	// artifacts. Blank lines and '#' lines are section separators, not rules.
	const GITATTRIBUTES_RULES:&[&str] = &[
		"**/NODE/**/bin/node filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/node.exe filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/bin/npm filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/bin/npx filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/bin/corepack filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/npm filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/npm.cmd filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/npx filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/npx.cmd filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/corepack filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/corepack.cmd filter=lfs diff=lfs merge=lfs -text",
		"",
		"# --- Rules for the SideCar build artifacts ---",
		"",
		"Target/debug/*.exe filter=lfs diff=lfs merge=lfs -text",
		"Target/release/*.exe filter=lfs diff=lfs merge=lfs -text",
		"",
		"Target/debug/SideCar filter=lfs diff=lfs merge=lfs -text",
		"Target/release/SideCar filter=lfs diff=lfs merge=lfs -text",
		"",
		"Target/debug/Download filter=lfs diff=lfs merge=lfs -text",
		"Target/release/Download filter=lfs diff=lfs merge=lfs -text",
	];

	let GitattributesPath = BaseDirectory.join(".gitattributes");

	if !GitattributesPath.exists() {
		info!("Creating .gitattributes file to track binaries with Git LFS.");

		// Fresh file: header plus the complete rule list.
		let mut File = File::create(&GitattributesPath)
			.with_context(|| format!("Failed to create .gitattributes file at {:?}", GitattributesPath))?;

		writeln!(File, "{}", GITATTRIBUTES_HEADER)?;

		for Rule in GITATTRIBUTES_RULES {
			writeln!(File, "{}", Rule)?;
		}
	} else {
		info!(".gitattributes file found. Verifying LFS rules...");

		let Content = fs::read_to_string(&GitattributesPath)?;

		// Only real patterns count as "missing"; blank and comment lines are
		// layout and must never be re-appended.
		let MissingRules:Vec<_> = GITATTRIBUTES_RULES
			.iter()
			.filter(|rule| !rule.is_empty() && !rule.starts_with('#'))
			.filter(|rule| !Content.contains(*rule))
			.collect();

		if !MissingRules.is_empty() {
			info!("Adding {} missing LFS rules to .gitattributes.", MissingRules.len());

			let mut File = fs::OpenOptions::new()
				.append(true)
				.open(&GitattributesPath)
				.with_context(|| format!("Failed to open .gitattributes for appending at {:?}", GitattributesPath))?;

			// NOTE(review): this banner is appended on every run that adds rules,
			// so repeated runs can accumulate duplicate banners — confirm intended.
			writeln!(File, "\n\n# --- Rules Automatically Added by Vendor Script ---")?;

			for Rule in MissingRules {
				writeln!(File, "{}", Rule)?;
			}
		} else {
			info!(".gitattributes is already up to date.");
		}
	}

	Ok(())
}
353
354async fn FetchNodeVersions(Client:&Client) -> Result<Vec<NodeVersionInfo>> {
358 info!("Fetching Node.js version index for resolving versions...");
359
360 let Response = Client
361 .get("https://nodejs.org/dist/index.json")
362 .send()
363 .await
364 .context("Failed to send request to Node.js version index.")?;
365
366 if !Response.status().is_success() {
367 return Err(anyhow!("Received non-success status from Node.js index: {}", Response.status()));
368 }
369
370 let Versions = Response
371 .json::<Vec<NodeVersionInfo>>()
372 .await
373 .context("Failed to parse Node.js version index JSON.")?;
374
375 Ok(Versions)
376}
377
378fn ResolveLatestPatchVersion(MajorVersion:&str, AllVersions:&[NodeVersionInfo]) -> Option<String> {
381 let VersionPrefix = format!("v{}.", MajorVersion);
382
383 AllVersions
384 .iter()
385 .find(|v| v.version.starts_with(&VersionPrefix))
386 .map(|v| v.version.clone())
387}
388
389async fn DownloadFile(Client:&Client, URL:&str, DestinationPath:&Path) -> Result<()> {
391 let mut Response = Client.get(URL).send().await?.error_for_status()?;
392
393 let mut DestinationFile =
394 File::create(DestinationPath).with_context(|| format!("Failed to create file at {:?}", DestinationPath))?;
395
396 while let Some(Chunk) = Response.chunk().await? {
398 DestinationFile.write_all(&Chunk)?;
399 }
400
401 Ok(())
402}
403
404fn ExtractArchive(ArchiveType:&ArchiveType, ArchivePath:&Path, ExtractionDirectory:&Path) -> Result<()> {
408 info!("Performing a full extraction of the archive...");
409
410 match ArchiveType {
411 ArchiveType::Zip => {
412 let File = File::open(ArchivePath)?;
413
414 let mut Archive = zip::ZipArchive::new(File)?;
415
416 Archive.extract(ExtractionDirectory)?;
417 },
418
419 ArchiveType::TarGz => {
420 let File = File::open(ArchivePath)?;
421
422 let Decompressor = flate2::read::GzDecoder::new(File);
423
424 let mut Archive = tar::Archive::new(Decompressor);
425
426 Archive.unpack(ExtractionDirectory)?;
427 },
428 }
429
430 Ok(())
431}
432
/// Executes one download job end-to-end: download the archive into a fresh
/// temporary directory, extract it there, then atomically move the extracted
/// folder into its final destination and record the version in the cache.
async fn ProcessDownloadTask(Task:DownloadTask, Client:Client, Cache:Arc<Mutex<DownloadCache>>) -> Result<()> {
	// The TempDir guard must stay alive until after the rename below; dropping
	// it deletes the directory and everything staged inside it.
	let TempDirectory = Builder::new()
		.prefix("SideCar-Download-")
		.tempdir_in(&Task.TempParentDirectory)
		.context("Failed to create temporary directory.")?;

	// Last URL path segment doubles as the local archive file name.
	let ArchiveName = Task.DownloadURL.split('/').last().unwrap_or("Download.tmp");

	let ArchivePath = TempDirectory.path().join(ArchiveName);

	info!(
		" [{}/{}] Downloading from: {}",
		Task.TauriTargetTriple, Task.SidecarName, Task.DownloadURL
	);

	if let Err(Error) = DownloadFile(&Client, &Task.DownloadURL, &ArchivePath).await {
		error!(
			" [{}/{}] Failed to download {}: {}",
			Task.TauriTargetTriple, Task.SidecarName, ArchiveName, Error
		);

		return Err(Error.into());
	}

	info!(" [{}/{}] Extracting archive...", Task.TauriTargetTriple, Task.SidecarName);

	if let Err(Error) = ExtractArchive(&Task.ArchiveType, &ArchivePath, TempDirectory.path()) {
		error!(
			" [{}/{}] Failed to extract {}: {}",
			Task.TauriTargetTriple, Task.SidecarName, ArchiveName, Error
		);

		return Err(Error.into());
	}

	// The archive is expected to contain one top-level folder with this name.
	let ExtractedPath = TempDirectory.path().join(&Task.ExtractedFolderName);

	if !ExtractedPath.exists() {
		let ErrorMessage = format!(" Could not find extracted folder: {:?}", ExtractedPath);

		error!("{}", ErrorMessage);

		return Err(anyhow!(ErrorMessage));
	}

	// Replace any previously installed version wholesale.
	if Task.DestinationDirectory.exists() {
		info!(" Removing old version at: {:?}", Task.DestinationDirectory);

		fs::remove_dir_all(&Task.DestinationDirectory)?;
	}

	if let Some(Parent) = Task.DestinationDirectory.parent() {
		fs::create_dir_all(Parent)?;
	}

	info!(" Installing to: {:?}", Task.DestinationDirectory);

	// NOTE(review): fs::rename requires source and destination on the same
	// filesystem — assumes TempParentDirectory lives under the same base
	// directory as the destination; confirm against the caller.
	fs::rename(&ExtractedPath, &Task.DestinationDirectory).with_context(|| {
		format!(
			"Failed to rename/move extracted directory from {:?} to {:?}",
			ExtractedPath, Task.DestinationDirectory
		)
	})?;

	let CacheKey = format!("{}/{}/{}", Task.TauriTargetTriple, Task.SidecarName, Task.MajorVersion);

	// Record the installed version so later runs skip it. Lock is held only
	// for the insert; no await happens while it is held.
	let mut LockedCache = Cache.lock().unwrap();

	LockedCache.Entries.insert(CacheKey, Task.FullVersion.clone());

	info!(
		" v{} ({}) for '{}' is now up to date.",
		Task.MajorVersion, Task.FullVersion, Task.TauriTargetTriple
	);

	Ok(())
}
516
517pub fn Logger() {
519 let LevelText = env::var(LogEnv).unwrap_or_else(|_| "info".to_string());
520
521 let LogLevel = LevelText.parse::<LevelFilter>().unwrap_or(LevelFilter::Info);
522
523 env_logger::Builder::new()
524 .filter_level(LogLevel)
525 .format(|Buffer, Record| {
526 let LevelStyle = match Record.level() {
527 log::Level::Error => "ERROR".red().bold(),
528
529 log::Level::Warn => "WARN".yellow().bold(),
530
531 log::Level::Info => "INFO".green(),
532
533 log::Level::Debug => "DEBUG".blue(),
534
535 log::Level::Trace => "TRACE".magenta(),
536 };
537
538 writeln!(Buffer, "[{}] [{}]: {}", "Download".red(), LevelStyle, Record.args())
539 })
540 .parse_default_env()
541 .init();
542}
543
544#[tokio::main]
545pub async fn Fn() -> Result<()> {
546 Logger();
547
548 info!("Starting Universal Sidecar vendoring process...");
549
550 let BaseSidecarDirectory = GetBaseSidecarDirectory()?;
552
553 UpdateGitattributes(&BaseSidecarDirectory)?;
555
556 let TempDownloadsDirectory = BaseSidecarDirectory.join("Temporary");
558
559 fs::create_dir_all(&TempDownloadsDirectory)
560 .with_context(|| format!("Failed to create temporary directory at {:?}", TempDownloadsDirectory))?;
561
562 let CachePath = BaseSidecarDirectory.join("Cache.json");
563
564 let Cache = Arc::new(Mutex::new(DownloadCache::Load(&CachePath)));
565
566 let HttpClient = Client::new();
567
568 let PlatformMatrix = GetPlatformMatrix();
569
570 let SidecarsToFetch = GetSidecarsToFetch();
571
572 let NodeVersions = FetchNodeVersions(&HttpClient).await?;
574
575 let mut TasksToRun = Vec::new();
576
577 for Platform in &PlatformMatrix {
580 info!("--- Processing architecture: '{}' ---", Platform.TauriTargetTriple);
581
582 for (SidecarName, MajorVersions) in &SidecarsToFetch {
583 info!(" -> Processing sidecar: '{}'", SidecarName);
584
585 for MajorVersion in MajorVersions {
586 let DestinationDirectory = BaseSidecarDirectory
587 .join(&Platform.TauriTargetTriple)
588 .join(SidecarName)
589 .join(MajorVersion);
590
591 if SidecarName == "NODE" {
593 let FullVersion = match ResolveLatestPatchVersion(MajorVersion, &NodeVersions) {
594 Some(Version) => Version,
595
596 None => {
597 warn!(
598 " Could not resolve a specific version for Node.js v{}. Skipping.",
599 MajorVersion
600 );
601
602 continue;
603 },
604 };
605
606 let CacheKey = format!("{}/{}/{}", &Platform.TauriTargetTriple, SidecarName, MajorVersion);
608
609 let CachedVersion = Cache.lock().unwrap().Entries.get(&CacheKey).cloned();
610
611 if Some(FullVersion.clone()) == CachedVersion {
612 info!(" v{} ({}) is already up to date, skipping.", MajorVersion, FullVersion);
613
614 continue;
615 }
616
617 if CachedVersion.is_some() {
618 info!(
619 " Found newer patch for v{}: {} -> {}. Scheduling update.",
620 MajorVersion,
621 CachedVersion.unwrap(),
622 FullVersion
623 );
624 } else {
625 info!(" Processing v{} (resolved to {})...", MajorVersion, FullVersion);
626 }
627
628 let ArchiveExtension = &Platform.ArchiveExtension;
629
630 let ArchiveName =
631 format!("node-{}-{}.{}", FullVersion, Platform.DownloadIdentifier, ArchiveExtension);
632
633 let DownloadURL = format!("https://nodejs.org/dist/{}/{}", FullVersion, ArchiveName);
634
635 let ExtractedFolderName = format!("node-{}-{}", FullVersion, Platform.DownloadIdentifier);
636
637 let Task = DownloadTask {
638 SidecarName:SidecarName.clone(),
639
640 MajorVersion:MajorVersion.clone(),
641
642 FullVersion,
643
644 DownloadURL,
645
646 TempParentDirectory:TempDownloadsDirectory.clone(),
647
648 DestinationDirectory,
649
650 ArchiveType:if ArchiveExtension == "zip" { ArchiveType::Zip } else { ArchiveType::TarGz },
651
652 ExtractedFolderName,
653
654 TauriTargetTriple:Platform.TauriTargetTriple.clone(),
655 };
656
657 TasksToRun.push(Task);
658 }
659
660 }
663 }
664 }
665
666 if TasksToRun.is_empty() {
668 info!("All sidecar binaries are already up to date.");
669 } else {
670 info!("Found {} tasks to run. Starting concurrent downloads...", TasksToRun.len());
671
672 let NumberOfConcurrentJobs = num_cpus::get().min(8);
674
675 let Results = stream::iter(TasksToRun)
678 .map(|Task| {
679 let Client = HttpClient.clone();
680
681 let Cache = Arc::clone(&Cache);
682
683 tokio::spawn(async move { ProcessDownloadTask(Task, Client, Cache).await })
684 })
685 .buffer_unordered(NumberOfConcurrentJobs)
686 .collect::<Vec<_>>()
687 .await;
688
689 let mut ErrorsEncountered = 0;
691
692 for Result in Results {
693 if let Err(JoinError) = Result {
695 error!("A download task panicked or was cancelled: {}", JoinError);
696
697 ErrorsEncountered += 1;
698 } else if let Ok(Err(AppError)) = Result {
699 error!("A download task failed: {}", AppError);
702
703 ErrorsEncountered += 1;
704 }
705 }
706
707 if ErrorsEncountered > 0 {
708 error!("Completed with {} errors.", ErrorsEncountered);
709 }
710 }
711
712 info!("Saving updated cache...");
714
715 Cache.lock().unwrap().Save(&CachePath)?;
716
717 info!("All sidecar binaries have been successfully processed and organized.");
718
719 Ok(())
720}
721
722#[allow(unused)]
724fn main() {
725 if let Err(Error) = Fn() {
727 error!("The application encountered a fatal error: {}", Error);
729
730 std::process::exit(1);
731 }
732}
733
use std::{
	collections::{BTreeMap, HashMap},
	env,
	fs::{self, File},
	io::Write,
	path::{Path, PathBuf},
	sync::{Arc, Mutex},
};

use anyhow::{Context, Result, anyhow};
use colored::*;
use futures::stream::{self, StreamExt};
use log::{LevelFilter, error, info, warn};
use reqwest::Client;
use serde::{Deserialize, Serialize};
use tempfile::Builder;