dev_scripts/sync/pkgsrc.rs

use std::{
    collections::{HashMap, HashSet},
    fs::remove_dir_all,
    path::{Path, PathBuf},
    process::Command,
};

use anyhow::{Context, Result};
use log::{error, info};
use rayon::prelude::*;
use strum::Display;

use super::filenames_in_dir;
use crate::{cmd::ensure_success, ui::get_progress_bar};

const PKGBASE_MAINTAINER_URL: &str = "https://archlinux.org/packages/pkgbase-maintainer";
const SSH_HOST: &str = "git@gitlab.archlinux.org";
const REPO_BASE_URL: &str = "archlinux/packaging/packages";
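
/// A few packages have a pkgbase that differs from the name of their packaging
/// repository on GitLab. Map those pkgbase names to the actual repository names.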
19
20const PACKAGE_REPO_RENAMES: [(&str, &str); 3] = [
23 ("gtk2+extra", "gtk2-extra"),
24 ("dvd+rw-tools", "dvd-rw-tools"),
25 ("tree", "unix-tree"),
26];
27
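
/// Mirrors the `.SRCINFO` files of all official Arch Linux packages by cloning or
/// updating their packaging repositories from the Arch Linux GitLab.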
pub struct PkgSrcDownloader {
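    /// Base directory below which the `download/` and `pkgsrc/` trees are created.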
    pub dest: PathBuf,
}

impl PkgSrcDownloader {
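    /// Fetch the list of all official packages from archweb, clone or update each
    /// packaging repository in parallel, remove repositories that no longer exist
    /// upstream, and copy every `.SRCINFO` into the `pkgsrc/` output directory.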
    pub fn download_package_source_repositories(&self) -> Result<()> {
        let repos = reqwest::blocking::get(PKGBASE_MAINTAINER_URL)
            .context("Failed to query pkgbase url.")?
            .json::<HashMap<String, Vec<String>>>()
            .context("Failed to deserialize archweb pkglist.")?;

        let all_repo_names: Vec<String> = repos.keys().map(String::from).collect();
        info!("Found {} official packages.", all_repo_names.len());

        let download_dir = self.dest.join("download/pkgsrc");
        self.parallel_update_or_clone(&all_repo_names, &download_dir)?;

        self.remove_old_repos(&all_repo_names, &download_dir)?;
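
        // Copy each repository's `.SRCINFO` into the final `pkgsrc/` tree.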
        for repo in all_repo_names {
            let download_path = download_dir.join(&repo);
            if download_path.join(".SRCINFO").exists() {
                let target_dir = self.dest.join("pkgsrc").join(&repo);
                std::fs::create_dir_all(&target_dir)?;
                std::fs::copy(download_path.join(".SRCINFO"), target_dir.join(".SRCINFO"))?;
            }
        }

        Ok(())
    }
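
    /// Delete local repositories whose packages no longer exist in the upstream
    /// package list.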
    fn remove_old_repos(&self, repos: &[String], download_dir: &Path) -> Result<()> {
        let local_repositories = filenames_in_dir(download_dir)?;

        let remote_pkgs: HashSet<String> = HashSet::from_iter(repos.iter().map(String::from));

        let removed_pkgs: Vec<&String> = local_repositories.difference(&remote_pkgs).collect();

        if !removed_pkgs.is_empty() {
            info!("Found {} local repositories to clean up.", removed_pkgs.len());
            for removed in removed_pkgs {
                remove_dir_all(download_dir.join(removed))
                    .with_context(|| format!("Failed to remove local repository {removed}"))?;
            }
        }

        Ok(())
    }
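
    /// Clone or update all given repositories in parallel while showing a progress bar.
    /// Failures are collected and logged at the end instead of aborting the whole run.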
    fn parallel_update_or_clone(&self, repos: &[String], download_dir: &Path) -> Result<()> {
        let progress_bar = get_progress_bar(repos.len() as u64);

        warmup_ssh_session()?;
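
        // Clone new repositories and update existing ones in parallel via rayon.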
        let results: Vec<Result<(), RepoUpdateError>> = repos
            .par_iter()
            .map(|repo| {
                let target_dir = download_dir.join(repo);

                let result = if target_dir.exists() {
                    update_repo(repo, &target_dir)
                } else {
                    clone_repo(repo.to_string(), &target_dir)
                };

                progress_bar.inc(1);
                result
            })
            .collect();

        progress_bar.finish_with_message("All repositories cloned or updated.");

        let mut error_iter = results.into_iter().filter_map(Result::err).peekable();
        if error_iter.peek().is_some() {
            error!("The command failed for the following repositories:");
            for error in error_iter {
                error!(
                    "{} failed for repo {} with error:\n{:?}",
                    error.operation, error.repo, error.inner
                );
            }
        }

        Ok(())
    }
}
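
/// Run a single `ssh -T` against the GitLab host to warm up the SSH connection
/// before the parallel git operations start.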
pub fn warmup_ssh_session() -> Result<()> {
    let output = &Command::new("ssh")
        .args(["-T", SSH_HOST])
        .output()
        .context("Failed to start ssh warmup command")?;

    ensure_success(output).context("Failed to run ssh warmup command")
}
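
/// The git operation during which a repository error occurred.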
#[derive(Display)]
enum RepoUpdateOperation {
    Clone,
    Update,
}
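
/// An error from cloning or updating a single repository, carrying the repository
/// name and the operation that failed.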
struct RepoUpdateError {
    repo: String,
    operation: RepoUpdateOperation,
    inner: anyhow::Error,
}
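
/// Update an existing repository: discard any local changes with `git reset --hard`,
/// then pull the latest state with `git pull --force`.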
fn update_repo(repo: &str, target_dir: &Path) -> Result<(), RepoUpdateError> {
    let output = &Command::new("git")
        .current_dir(target_dir)
        .args(["reset", "--hard"])
        .output()
        .map_err(|err| RepoUpdateError {
            repo: repo.to_string(),
            operation: RepoUpdateOperation::Update,
            inner: err.into(),
        })?;

    ensure_success(output).map_err(|err| RepoUpdateError {
        repo: repo.to_string(),
        operation: RepoUpdateOperation::Update,
        inner: err,
    })?;

    let output = Command::new("git")
        .current_dir(target_dir)
        .args(["pull", "--force"])
        .output()
        .map_err(|err| RepoUpdateError {
            repo: repo.to_string(),
            operation: RepoUpdateOperation::Update,
            inner: err.into(),
        })?;

    ensure_success(&output).map_err(|err| RepoUpdateError {
        repo: repo.to_string(),
        operation: RepoUpdateOperation::Update,
        inner: err,
    })
}
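
/// Clone a package repository from the Arch Linux GitLab via SSH into `target_dir`.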
fn clone_repo(mut repo: String, target_dir: &Path) -> Result<(), RepoUpdateError> {
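    // A few repositories are named differently from their pkgbase on GitLab.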
    for (to_replace, replace_with) in PACKAGE_REPO_RENAMES {
        if repo == to_replace {
            repo = replace_with.to_string();
        }
    }
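
    // The GitLab repository names don't contain `+`; it is encoded as `plus`.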
    repo = repo.replace("+", "plus");

    let ssh_url = format!("{SSH_HOST}:{REPO_BASE_URL}/{repo}.git");

    let output = &Command::new("git")
        .arg("clone")
        .arg(&ssh_url)
        .arg(target_dir)
        .output()
        .map_err(|err| RepoUpdateError {
            repo: repo.to_string(),
            operation: RepoUpdateOperation::Clone,
            inner: err.into(),
        })?;

    ensure_success(output).map_err(|err| RepoUpdateError {
        repo: repo.to_string(),
        operation: RepoUpdateOperation::Clone,
        inner: err,
    })
}