// Download/Download.rs

// ==============================================================================
// Universal Sidecar Vendor - Rust Edition
//
// This program automates downloading and organizing full distributions of
// various sidecar runtimes (like Node.js) for a Tauri application. It is a Rust
// rewrite of the original shell script, enhanced with modern features.
//
// Key Features:
//   - Asynchronous, Concurrent Downloads: Leverages Tokio to download multiple
//     binaries in parallel, significantly speeding up the process.
//   - Intelligent Caching: Maintains a `Cache.json` file to track downloaded
//     versions. It automatically detects if a newer patch version is available
//     for a requested major version and updates the binary.
//   - Git LFS Management: Automatically creates or updates the `.gitattributes`
//     file to ensure large binaries are tracked by Git LFS.
//   - Extensible Design: Easily configured to support new sidecars, versions,
//     and platforms.
//   - Robust Error Handling: Uses `anyhow` for clear and concise error
//     reporting.
//   - Preserved File Structure: The final output directory structure remains
//     identical to the original script (`Architecture/SidecarName/Version`).
//
// ==============================================================================

// Allow non_snake_case to meet the user's naming convention requirement.
#![allow(non_snake_case, non_upper_case_globals)]

// --- Type Definitions and Structs ---

/// Represents a single platform target for which binaries will be downloaded.
/// This struct holds all the necessary identifiers for a given platform.
#[derive(Clone, Debug)]
struct PlatformTarget {
	/// The identifier used in the download URL (e.g., "win-x64",
	/// "linux-arm64").
	DownloadIdentifier:String,

	/// The file extension of the archive (e.g., "zip", "tar.gz").
	ArchiveExtension:String,

	/// The official Tauri target triple for this platform (e.g.,
	/// "x86_64-pc-windows-msvc").
	TauriTargetTriple:String,
}

/// Defines the type of archive being handled, which determines the extraction
/// logic.
#[derive(Clone, Debug, PartialEq)]
enum ArchiveType {
	/// A `.zip` archive (used for Windows downloads).
	Zip,

	/// A `.tar.gz` archive (used for Linux and macOS downloads).
	TarGz,
}

55/// Represents a specific version of Node.js as returned by the official index.
56/// Used for deserializing the JSON response from `nodejs.org`.
57#[derive(Deserialize, Debug)]
58struct NodeVersionInfo {
59	version:String,
60}
61
62/// Contains all the necessary information to perform a single download and
63/// installation task. An instance of this struct is created for each binary
64/// that needs to be downloaded.
65#[derive(Clone, Debug)]
66struct DownloadTask {
67	/// The name of the sidecar (e.g., "NODE").
68	SidecarName:String,
69
70	/// The major version string requested (e.g., "24").
71	MajorVersion:String,
72
73	/// The full, resolved version string (e.g., "v24.0.0").
74	FullVersion:String,
75
76	/// The complete URL to download the archive from.
77	DownloadURL:String,
78
79	/// The directory where temporary folders for this task will be created.
80	TempParentDirectory:PathBuf,
81
82	/// The final destination directory for the extracted binaries.
83	DestinationDirectory:PathBuf,
84
85	/// The type of archive to be downloaded.
86	ArchiveType:ArchiveType,
87
88	/// The name of the root folder inside the archive once extracted.
89	ExtractedFolderName:String,
90
91	/// The Tauri target triple for this download task.
92	TauriTargetTriple:String,
93}
94
95/// Represents the structure of the `Cache.json` file.
96/// It uses a HashMap to map a unique key (representing a specific
97/// sidecar/version/platform) to the full version string that was last
98/// downloaded.
99#[derive(Serialize, Deserialize, Debug, Default)]
100struct DownloadCache {
101	/// The core data structure for the cache.
102	/// Key: A unique string like "x86_64-pc-windows-msvc/NODE/24".
103	/// Value: The full version string, like "v24.0.0".
104	Entries:HashMap<String, String>,
105}
106
107impl DownloadCache {
108	/// Loads the cache from the `Cache.json` file in the base sidecar
109	/// directory. If the file doesn't exist, it returns a new, empty cache.
110	fn Load(CachePath:&Path) -> Self {
111		if !CachePath.exists() {
112			info!("Cache file not found. A new one will be created.");
113
114			return DownloadCache::default();
115		}
116
117		let FileContents = match fs::read_to_string(CachePath) {
118			Ok(Contents) => Contents,
119
120			Err(Error) => {
121				warn!("Failed to read cache file: {}. Starting with an empty cache.", Error);
122
123				return DownloadCache::default();
124			},
125		};
126
127		match serde_json::from_str(&FileContents) {
128			Ok(Cache) => {
129				info!("Successfully loaded download cache.");
130
131				Cache
132			},
133
134			Err(Error) => {
135				warn!("Failed to parse cache file: {}. Starting with an empty cache.", Error);
136
137				DownloadCache::default()
138			},
139		}
140	}
141
142	/// Saves the current state of the cache to the `Cache.json` file.
143	/// The JSON is pretty-printed with tabs for indentation.
144	/// Entries are sorted alphabetically by key for consistency.
145	fn Save(&self, CachePath:&Path) -> Result<()> {
146		// Create a BTreeMap to sort entries alphabetically by key
147		let SortedEntries:BTreeMap<_, _> = self.Entries.iter().collect();
148
149		// Create a temporary struct to hold the sorted entries for serialization
150		let CacheToSerialize = serde_json::json!({
151			"Entries": SortedEntries
152		});
153
154		// Create an in-memory buffer to write the serialized JSON to.
155		let mut Buffer = Vec::new();
156
157		// Create a formatter that uses a tab character for indentation.
158		let Formatter = serde_json::ser::PrettyFormatter::with_indent(b"\t");
159
160		// Create a serializer with our custom formatter.
161		let mut Serializer = serde_json::Serializer::with_formatter(&mut Buffer, Formatter);
162
163		// Serialize the sorted cache data into the buffer.
164		CacheToSerialize.serialize(&mut Serializer)?;
165
166		// Write the buffer's contents to the actual file on disk.
167		fs::write(CachePath, &Buffer)
168			.with_context(|| format!("Failed to write tab-formatted cache to {:?}", CachePath))?;
169
170		Ok(())
171	}
172}

// --- Configuration ---

176/// Returns the root directory where all sidecars will be stored.
177/// This is determined dynamically by navigating up from the executable's
178/// location. It assumes the executable is located in a path like
179/// `.../SideCar/Target/release/`, and it will resolve the base path to
180/// `.../SideCar/`.
181fn GetBaseSidecarDirectory() -> Result<PathBuf> {
182	// Get the full path to the currently running executable.
183	let CurrentExePath = env::current_exe().context("Failed to get the path of the current executable.")?;
184
185	// The first .parent() gets the directory containing the exe (e.g., `release`).
186	// We then navigate up two more levels to get to the intended `SideCar`
187	// directory.
188	let BaseDirectory = CurrentExePath
189		.parent()
190		.and_then(|p| p.parent())
191		.and_then(|p| p.parent())
192		.context(
193			"Could not determine the base sidecar directory. Expected to be run from a subdirectory like \
194			 'Target/release' within the sidecar project.",
195		)?;
196
197	Ok(BaseDirectory.to_path_buf())
198}
199
200/// Defines the matrix of platforms to target. Each entry specifies how to
201/// download and identify binaries for a specific architecture.
202fn GetPlatformMatrix() -> Vec<PlatformTarget> {
203	vec![
204		PlatformTarget {
205			DownloadIdentifier:"win-x64".to_string(),
206
207			ArchiveExtension:"zip".to_string(),
208
209			TauriTargetTriple:"x86_64-pc-windows-msvc".to_string(),
210		},
211		PlatformTarget {
212			DownloadIdentifier:"linux-x64".to_string(),
213
214			ArchiveExtension:"tar.gz".to_string(),
215
216			TauriTargetTriple:"x86_64-unknown-linux-gnu".to_string(),
217		},
218		PlatformTarget {
219			DownloadIdentifier:"linux-arm64".to_string(),
220
221			ArchiveExtension:"tar.gz".to_string(),
222
223			TauriTargetTriple:"aarch64-unknown-linux-gnu".to_string(),
224		},
225		PlatformTarget {
226			DownloadIdentifier:"darwin-x64".to_string(),
227
228			ArchiveExtension:"tar.gz".to_string(),
229
230			TauriTargetTriple:"x86_64-apple-darwin".to_string(),
231		},
232		PlatformTarget {
233			DownloadIdentifier:"darwin-arm64".to_string(),
234
235			ArchiveExtension:"tar.gz".to_string(),
236
237			TauriTargetTriple:"aarch64-apple-darwin".to_string(),
238		},
239	]
240}
241
/// Defines which sidecars and versions to fetch. This structure makes it
/// easy to add more sidecars like Deno in the future.
///
/// Returns a map of sidecar name (e.g., "NODE") to the list of requested
/// major versions, newest first.
fn GetSidecarsToFetch() -> HashMap<String, Vec<String>> {
	let NodeMajorVersions = ["24", "23", "22", "21", "20", "19", "18", "17", "16"]
		.iter()
		.map(|Version| Version.to_string())
		.collect();

	HashMap::from([("NODE".to_string(), NodeMajorVersions)])
}

// --- Helper Functions ---

/// Environment variable for setting the log level (read by `Logger`).
pub const LogEnv:&str = "RUST_LOG";

263/// Manages the `.gitattributes` file to ensure binaries are tracked by Git LFS.
264/// If the file does not exist, it is created. If it exists, missing rules are
265/// appended.
266fn UpdateGitattributes(BaseDirectory:&Path) -> Result<()> {
267	const GITATTRIBUTES_HEADER:&str = r#"################################################################################
268# Git LFS configuration for vendored Tauri Sidecars
269#
270# This file tells Git to use LFS (Large File Storage) for the heavy binary
271# files and modules downloaded by the sidecar vendoring script. This keeps the
272# main repository history small and fast.
273#
274# The `-text` attribute is used to prevent Git from normalizing line endings,
275# which is critical for binary files and scripts.
276#
277# This file is automatically managed by the sidecar vendor script.
278################################################################################
279
280# --- Rule Definitions ---"#;
281
282	const GITATTRIBUTES_RULES:&[&str] = &[
283		"**/NODE/**/bin/node filter=lfs diff=lfs merge=lfs -text",
284		"**/NODE/**/node.exe filter=lfs diff=lfs merge=lfs -text",
285		"**/NODE/**/bin/npm filter=lfs diff=lfs merge=lfs -text",
286		"**/NODE/**/bin/npx filter=lfs diff=lfs merge=lfs -text",
287		"**/NODE/**/bin/corepack filter=lfs diff=lfs merge=lfs -text",
288		"**/NODE/**/npm filter=lfs diff=lfs merge=lfs -text",
289		"**/NODE/**/npm.cmd filter=lfs diff=lfs merge=lfs -text",
290		"**/NODE/**/npx filter=lfs diff=lfs merge=lfs -text",
291		"**/NODE/**/npx.cmd filter=lfs diff=lfs merge=lfs -text",
292		"**/NODE/**/corepack filter=lfs diff=lfs merge=lfs -text",
293		"**/NODE/**/corepack.cmd filter=lfs diff=lfs merge=lfs -text",
294		"",
295		"# --- Rules for the SideCar build artifacts ---",
296		"",
297		"Target/debug/*.exe filter=lfs diff=lfs merge=lfs -text",
298		"Target/release/*.exe filter=lfs diff=lfs merge=lfs -text",
299		"",
300		"Target/debug/SideCar filter=lfs diff=lfs merge=lfs -text",
301		"Target/release/SideCar filter=lfs diff=lfs merge=lfs -text",
302		"",
303		"Target/debug/Download filter=lfs diff=lfs merge=lfs -text",
304		"Target/release/Download filter=lfs diff=lfs merge=lfs -text",
305	];
306
307	let GitattributesPath = BaseDirectory.join(".gitattributes");
308
309	if !GitattributesPath.exists() {
310		info!("Creating .gitattributes file to track binaries with Git LFS.");
311
312		let mut File = File::create(&GitattributesPath)
313			.with_context(|| format!("Failed to create .gitattributes file at {:?}", GitattributesPath))?;
314
315		writeln!(File, "{}", GITATTRIBUTES_HEADER)?;
316
317		for Rule in GITATTRIBUTES_RULES {
318			// This will write a blank line for any empty strings in the array
319			writeln!(File, "{}", Rule)?;
320		}
321	} else {
322		info!(".gitattributes file found. Verifying LFS rules...");
323
324		let Content = fs::read_to_string(&GitattributesPath)?;
325
326		let MissingRules:Vec<_> = GITATTRIBUTES_RULES
327			.iter()
328			// Filter out blank lines and comments from the check
329			.filter(|rule| !rule.is_empty() && !rule.starts_with('#'))
330			.filter(|rule| !Content.contains(*rule))
331			.collect();
332
333		if !MissingRules.is_empty() {
334			info!("Adding {} missing LFS rules to .gitattributes.", MissingRules.len());
335
336			let mut File = fs::OpenOptions::new()
337				.append(true)
338				.open(&GitattributesPath)
339				.with_context(|| format!("Failed to open .gitattributes for appending at {:?}", GitattributesPath))?;
340
341			writeln!(File, "\n\n# --- Rules Automatically Added by Vendor Script ---")?;
342
343			for Rule in MissingRules {
344				writeln!(File, "{}", Rule)?;
345			}
346		} else {
347			info!(".gitattributes is already up to date.");
348		}
349	}
350
351	Ok(())
352}

// --- Core Logic ---

356/// Fetches the official Node.js versions index from nodejs.org.
357async fn FetchNodeVersions(Client:&Client) -> Result<Vec<NodeVersionInfo>> {
358	info!("Fetching Node.js version index for resolving versions...");
359
360	let Response = Client
361		.get("https://nodejs.org/dist/index.json")
362		.send()
363		.await
364		.context("Failed to send request to Node.js version index.")?;
365
366	if !Response.status().is_success() {
367		return Err(anyhow!("Received non-success status from Node.js index: {}", Response.status()));
368	}
369
370	let Versions = Response
371		.json::<Vec<NodeVersionInfo>>()
372		.await
373		.context("Failed to parse Node.js version index JSON.")?;
374
375	Ok(Versions)
376}
377
378/// Resolves a major version string (e.g., "22") to the latest full patch
379/// version (e.g., "v22.3.0") using the fetched version index.
380fn ResolveLatestPatchVersion(MajorVersion:&str, AllVersions:&[NodeVersionInfo]) -> Option<String> {
381	let VersionPrefix = format!("v{}.", MajorVersion);
382
383	AllVersions
384		.iter()
385		.find(|v| v.version.starts_with(&VersionPrefix))
386		.map(|v| v.version.clone())
387}
388
389/// Downloads a file from a URL to a specified path.
390async fn DownloadFile(Client:&Client, URL:&str, DestinationPath:&Path) -> Result<()> {
391	let mut Response = Client.get(URL).send().await?.error_for_status()?;
392
393	let mut DestinationFile =
394		File::create(DestinationPath).with_context(|| format!("Failed to create file at {:?}", DestinationPath))?;
395
396	// Stream the download to handle large files without high memory usage.
397	while let Some(Chunk) = Response.chunk().await? {
398		DestinationFile.write_all(&Chunk)?;
399	}
400
401	Ok(())
402}
403
404/// Extracts the contents of a downloaded archive to a target directory.
405/// This function now performs a full extraction to ensure a complete
406/// distribution.
407fn ExtractArchive(ArchiveType:&ArchiveType, ArchivePath:&Path, ExtractionDirectory:&Path) -> Result<()> {
408	info!("Performing a full extraction of the archive...");
409
410	match ArchiveType {
411		ArchiveType::Zip => {
412			let File = File::open(ArchivePath)?;
413
414			let mut Archive = zip::ZipArchive::new(File)?;
415
416			Archive.extract(ExtractionDirectory)?;
417		},
418
419		ArchiveType::TarGz => {
420			let File = File::open(ArchivePath)?;
421
422			let Decompressor = flate2::read::GzDecoder::new(File);
423
424			let mut Archive = tar::Archive::new(Decompressor);
425
426			Archive.unpack(ExtractionDirectory)?;
427		},
428	}
429
430	Ok(())
431}
432
433/// The main asynchronous function for processing a single download task.
434/// This function is designed to be run concurrently for multiple tasks.
435async fn ProcessDownloadTask(Task:DownloadTask, Client:Client, Cache:Arc<Mutex<DownloadCache>>) -> Result<()> {
436	// Create the temporary directory inside the designated "Temporary" subfolder.
437	let TempDirectory = Builder::new()
438		.prefix("SideCar-Download-")
439		.tempdir_in(&Task.TempParentDirectory)
440		.context("Failed to create temporary directory.")?;
441
442	let ArchiveName = Task.DownloadURL.split('/').last().unwrap_or("Download.tmp");
443
444	let ArchivePath = TempDirectory.path().join(ArchiveName);
445
446	info!(
447		"      [{}/{}] Downloading from: {}",
448		Task.TauriTargetTriple, Task.SidecarName, Task.DownloadURL
449	);
450
451	if let Err(Error) = DownloadFile(&Client, &Task.DownloadURL, &ArchivePath).await {
452		error!(
453			"      [{}/{}] Failed to download {}: {}",
454			Task.TauriTargetTriple, Task.SidecarName, ArchiveName, Error
455		);
456
457		return Err(Error.into());
458	}
459
460	info!("      [{}/{}] Extracting archive...", Task.TauriTargetTriple, Task.SidecarName);
461
462	if let Err(Error) = ExtractArchive(&Task.ArchiveType, &ArchivePath, TempDirectory.path()) {
463		error!(
464			"      [{}/{}] Failed to extract {}: {}",
465			Task.TauriTargetTriple, Task.SidecarName, ArchiveName, Error
466		);
467
468		return Err(Error.into());
469	}
470
471	let ExtractedPath = TempDirectory.path().join(&Task.ExtractedFolderName);
472
473	if !ExtractedPath.exists() {
474		let ErrorMessage = format!("      Could not find extracted folder: {:?}", ExtractedPath);
475
476		error!("{}", ErrorMessage);
477
478		return Err(anyhow!(ErrorMessage));
479	}
480
481	// If the destination directory already exists, remove it.
482	if Task.DestinationDirectory.exists() {
483		info!("      Removing old version at: {:?}", Task.DestinationDirectory);
484
485		fs::remove_dir_all(&Task.DestinationDirectory)?;
486	}
487
488	// Ensure the parent of the final destination exists.
489	if let Some(Parent) = Task.DestinationDirectory.parent() {
490		fs::create_dir_all(Parent)?;
491	}
492
493	info!("      Installing to: {:?}", Task.DestinationDirectory);
494
495	fs::rename(&ExtractedPath, &Task.DestinationDirectory).with_context(|| {
496		format!(
497			"Failed to rename/move extracted directory from {:?} to {:?}",
498			ExtractedPath, Task.DestinationDirectory
499		)
500	})?;
501
502	// Update the cache with the new version.
503	let CacheKey = format!("{}/{}/{}", Task.TauriTargetTriple, Task.SidecarName, Task.MajorVersion);
504
505	let mut LockedCache = Cache.lock().unwrap();
506
507	LockedCache.Entries.insert(CacheKey, Task.FullVersion.clone());
508
509	info!(
510		"    v{} ({}) for '{}' is now up to date.",
511		Task.MajorVersion, Task.FullVersion, Task.TauriTargetTriple
512	);
513
514	Ok(())
515}
516
517/// Sets up the global logger for the application.
518pub fn Logger() {
519	let LevelText = env::var(LogEnv).unwrap_or_else(|_| "info".to_string());
520
521	let LogLevel = LevelText.parse::<LevelFilter>().unwrap_or(LevelFilter::Info);
522
523	env_logger::Builder::new()
524		.filter_level(LogLevel)
525		.format(|Buffer, Record| {
526			let LevelStyle = match Record.level() {
527				log::Level::Error => "ERROR".red().bold(),
528
529				log::Level::Warn => "WARN".yellow().bold(),
530
531				log::Level::Info => "INFO".green(),
532
533				log::Level::Debug => "DEBUG".blue(),
534
535				log::Level::Trace => "TRACE".magenta(),
536			};
537
538			writeln!(Buffer, "[{}] [{}]: {}", "Download".red(), LevelStyle, Record.args())
539		})
540		.parse_default_env()
541		.init();
542}
543
544#[tokio::main]
545pub async fn Fn() -> Result<()> {
546	Logger();
547
548	info!("Starting Universal Sidecar vendoring process...");
549
550	// --- Setup ---
551	let BaseSidecarDirectory = GetBaseSidecarDirectory()?;
552
553	// Manage the .gitattributes file for Git LFS.
554	UpdateGitattributes(&BaseSidecarDirectory)?;
555
556	// Define and create the dedicated directory for temporary downloads.
557	let TempDownloadsDirectory = BaseSidecarDirectory.join("Temporary");
558
559	fs::create_dir_all(&TempDownloadsDirectory)
560		.with_context(|| format!("Failed to create temporary directory at {:?}", TempDownloadsDirectory))?;
561
562	let CachePath = BaseSidecarDirectory.join("Cache.json");
563
564	let Cache = Arc::new(Mutex::new(DownloadCache::Load(&CachePath)));
565
566	let HttpClient = Client::new();
567
568	let PlatformMatrix = GetPlatformMatrix();
569
570	let SidecarsToFetch = GetSidecarsToFetch();
571
572	// Fetch Node versions once to be used by all tasks.
573	let NodeVersions = FetchNodeVersions(&HttpClient).await?;
574
575	let mut TasksToRun = Vec::new();
576
577	// --- Task Generation Phase (Sequential) ---
578	// First, we determine which downloads are necessary by checking the cache.
579	for Platform in &PlatformMatrix {
580		info!("--- Processing architecture: '{}' ---", Platform.TauriTargetTriple);
581
582		for (SidecarName, MajorVersions) in &SidecarsToFetch {
583			info!("  -> Processing sidecar: '{}'", SidecarName);
584
585			for MajorVersion in MajorVersions {
586				let DestinationDirectory = BaseSidecarDirectory
587					.join(&Platform.TauriTargetTriple)
588					.join(SidecarName)
589					.join(MajorVersion);
590
591				// --- Sidecar-Specific Download Logic ---
592				if SidecarName == "NODE" {
593					let FullVersion = match ResolveLatestPatchVersion(MajorVersion, &NodeVersions) {
594						Some(Version) => Version,
595
596						None => {
597							warn!(
598								"      Could not resolve a specific version for Node.js v{}. Skipping.",
599								MajorVersion
600							);
601
602							continue;
603						},
604					};
605
606					// Check cache to see if we need to download/update.
607					let CacheKey = format!("{}/{}/{}", &Platform.TauriTargetTriple, SidecarName, MajorVersion);
608
609					let CachedVersion = Cache.lock().unwrap().Entries.get(&CacheKey).cloned();
610
611					if Some(FullVersion.clone()) == CachedVersion {
612						info!("    v{} ({}) is already up to date, skipping.", MajorVersion, FullVersion);
613
614						continue;
615					}
616
617					if CachedVersion.is_some() {
618						info!(
619							"    Found newer patch for v{}: {} -> {}. Scheduling update.",
620							MajorVersion,
621							CachedVersion.unwrap(),
622							FullVersion
623						);
624					} else {
625						info!("    Processing v{} (resolved to {})...", MajorVersion, FullVersion);
626					}
627
628					let ArchiveExtension = &Platform.ArchiveExtension;
629
630					let ArchiveName =
631						format!("node-{}-{}.{}", FullVersion, Platform.DownloadIdentifier, ArchiveExtension);
632
633					let DownloadURL = format!("https://nodejs.org/dist/{}/{}", FullVersion, ArchiveName);
634
635					let ExtractedFolderName = format!("node-{}-{}", FullVersion, Platform.DownloadIdentifier);
636
637					let Task = DownloadTask {
638						SidecarName:SidecarName.clone(),
639
640						MajorVersion:MajorVersion.clone(),
641
642						FullVersion,
643
644						DownloadURL,
645
646						TempParentDirectory:TempDownloadsDirectory.clone(),
647
648						DestinationDirectory,
649
650						ArchiveType:if ArchiveExtension == "zip" { ArchiveType::Zip } else { ArchiveType::TarGz },
651
652						ExtractedFolderName,
653
654						TauriTargetTriple:Platform.TauriTargetTriple.clone(),
655					};
656
657					TasksToRun.push(Task);
658				}
659
660				// To add Deno, you would add an `else if SidecarName == "DENO"`
661				// block here.
662			}
663		}
664	}
665
666	// --- Concurrent Execution Phase ---
667	if TasksToRun.is_empty() {
668		info!("All sidecar binaries are already up to date.");
669	} else {
670		info!("Found {} tasks to run. Starting concurrent downloads...", TasksToRun.len());
671
672		// Limit to 8 concurrent jobs or num CPUs, whichever is smaller.
673		let NumberOfConcurrentJobs = num_cpus::get().min(8);
674
675		// Spawn a Tokio task for each download.
676		// Run tasks concurrently.
677		let Results = stream::iter(TasksToRun)
678			.map(|Task| {
679				let Client = HttpClient.clone();
680
681				let Cache = Arc::clone(&Cache);
682
683				tokio::spawn(async move { ProcessDownloadTask(Task, Client, Cache).await })
684			})
685			.buffer_unordered(NumberOfConcurrentJobs)
686			.collect::<Vec<_>>()
687			.await;
688
689		// Check for any errors that occurred during the concurrent tasks.
690		let mut ErrorsEncountered = 0;
691
692		for Result in Results {
693			// The first result is from tokio::spawn, the second from our function
694			if let Err(JoinError) = Result {
695				error!("A download task panicked or was cancelled: {}", JoinError);
696
697				ErrorsEncountered += 1;
698			} else if let Ok(Err(AppError)) = Result {
699				// We already logged the error inside `ProcessDownloadTask`, so just count it.
700				// Re-logging here to ensure it's captured at a higher level if needed.
701				error!("A download task failed: {}", AppError);
702
703				ErrorsEncountered += 1;
704			}
705		}
706
707		if ErrorsEncountered > 0 {
708			error!("Completed with {} errors.", ErrorsEncountered);
709		}
710	}
711
712	// --- Finalization ---
713	info!("Saving updated cache...");
714
715	Cache.lock().unwrap().Save(&CachePath)?;
716
717	info!("All sidecar binaries have been successfully processed and organized.");
718
719	Ok(())
720}
721
722/// Main executable function.
723#[allow(unused)]
724fn main() {
725	// We use a block here to handle the Result from Fn.
726	if let Err(Error) = Fn() {
727		// The logger should already be initialized by Fn, so we can use it.
728		error!("The application encountered a fatal error: {}", Error);
729
730		std::process::exit(1);
731	}
732}
733
// --- Imports ---
use std::{
	collections::{BTreeMap, HashMap},
	env,
	fs::{self, File},
	io::Write,
	path::{Path, PathBuf},
	sync::{Arc, Mutex},
};

use anyhow::{Context, Result, anyhow};
use colored::*;
use futures::stream::{self, StreamExt};
use log::{LevelFilter, error, info, warn};
use reqwest::Client;
use serde::{Deserialize, Serialize};
use tempfile::Builder;