use std::{
7 collections::{HashMap, HashSet},
8 fs::{copy, create_dir_all, remove_dir_all},
9 path::Path,
10 process::Command,
11};
12
13use alpm_types::{PKGBUILD_FILE_NAME, SRCINFO_FILE_NAME};
14use log::{error, info, trace};
15use rayon::prelude::*;
16use reqwest::blocking::get;
17
18use super::filenames_in_dir;
19use crate::{
20 CacheDir,
21 Error,
22 cmd::ensure_success,
23 consts::{DOWNLOAD_DIR, PKGSRC_DIR},
24 ui::get_progress_bar,
25};
26
/// The archweb endpoint returning a JSON map from pkgbase name to its list of maintainers.
const PKGBASE_MAINTAINER_URL: &str = "https://archlinux.org/packages/pkgbase-maintainer";
/// The SSH user and host used for all git operations.
const SSH_HOST: &str = "git@gitlab.archlinux.org";
/// The project namespace (relative to the SSH host) under which package repositories live.
const REPO_BASE_URL: &str = "archlinux/packaging/packages";

/// `(pkgbase, repository name)` pairs for packages whose git repository is named
/// differently from their pkgbase.
///
/// These explicit renames are applied before the generic `+` -> `plus` substitution
/// (see `clone_repo`), which is why entries containing `+` must be listed here.
const PACKAGE_REPO_RENAMES: [(&str, &str); 3] = [
    ("gtk2+extra", "gtk2-extra"),
    ("dvd+rw-tools", "dvd-rw-tools"),
    ("tree", "unix-tree"),
];
38
/// Downloads the package source repositories of all official packages and caches
/// their `PKGBUILD` and `.SRCINFO` files.
#[derive(Clone, Debug)]
pub struct PkgSrcDownloader {
    // The cache directory below which downloads (DOWNLOAD_DIR/PKGSRC_DIR) and the
    // extracted metadata files (PKGSRC_DIR) are stored.
    pub cache_dir: CacheDir,
48
49impl PkgSrcDownloader {
50 pub fn download_package_source_repositories(&self) -> Result<(), Error> {
52 let repos = get(PKGBASE_MAINTAINER_URL)
56 .map_err(|source| Error::HttpQueryFailed {
57 context: "retrieving the list of pkgbases".to_string(),
58 source,
59 })?
60 .json::<HashMap<String, Vec<String>>>()
61 .map_err(|source| Error::HttpQueryFailed {
62 context: "deserializing the response as JSON".to_string(),
63 source,
64 })?;
65
66 let all_repo_names: Vec<String> = repos.keys().map(String::from).collect();
67 info!("Found {} official packages.", all_repo_names.len());
68
69 let download_dir = self.cache_dir.as_ref().join(DOWNLOAD_DIR).join(PKGSRC_DIR);
70
71 self.remove_old_repos(&all_repo_names, &download_dir)?;
73
74 self.parallel_update_or_clone(&all_repo_names, &download_dir)?;
76
77 for repo in all_repo_names {
79 let download_path = download_dir.join(&repo);
80 for file in [SRCINFO_FILE_NAME, PKGBUILD_FILE_NAME] {
81 if download_path.join(file).exists() {
82 let target_dir = self.cache_dir.as_ref().join(PKGSRC_DIR).join(&repo);
83 create_dir_all(&target_dir).map_err(|source| Error::IoPath {
84 path: target_dir.to_path_buf(),
85 context: "recursively creating a directory".to_string(),
86 source,
87 })?;
88 copy(download_path.join(file), target_dir.join(file)).map_err(|source| {
89 Error::IoPath {
90 path: download_path.join(file),
91 context: "copying the file to the target directory".to_string(),
92 source,
93 }
94 })?;
95 }
96 }
97 }
98
99 Ok(())
100 }
101
102 fn remove_old_repos(&self, repos: &[String], download_dir: &Path) -> Result<(), Error> {
108 let local_repositories = filenames_in_dir(download_dir)?;
110
111 let remote_pkgs: HashSet<String> = HashSet::from_iter(repos.iter().map(String::from));
113
114 let removed_pkgs: Vec<&String> = local_repositories.difference(&remote_pkgs).collect();
117
118 if !removed_pkgs.is_empty() {
120 info!("Found {} repositories for cleanup:", removed_pkgs.len());
121 for removed in removed_pkgs {
122 remove_dir_all(download_dir.join(removed)).map_err(|source| Error::IoPath {
123 path: download_dir.join(removed),
124 context: "removing the file".to_string(),
125 source,
126 })?;
127 }
128 }
129
130 Ok(())
131 }
132
133 fn parallel_update_or_clone(&self, repos: &[String], download_dir: &Path) -> Result<(), Error> {
137 let progress_bar = get_progress_bar(repos.len() as u64);
138
139 warmup_ssh_session()?;
141
142 let results: Vec<Result<(), Error>> = repos
144 .par_iter()
145 .map(|repo| {
146 let target_dir = download_dir.join(repo);
147
148 let result = if target_dir.exists() {
151 update_repo(repo, &target_dir)
152 } else {
153 clone_repo(repo.to_string(), &target_dir)
154 };
155
156 progress_bar.inc(1);
158 result
159 })
160 .collect();
161
162 progress_bar.finish_with_message("All repositories cloned or updated.");
164
165 let mut error_iter = results.into_iter().filter_map(Result::err).peekable();
167 if error_iter.peek().is_some() {
168 error!("The command failed for the following repositories:");
169 for error in error_iter {
170 error!("{error}");
171 }
172 }
173
174 Ok(())
175 }
176}
177
178pub fn warmup_ssh_session() -> Result<(), Error> {
185 let mut ssh_command = Command::new("ssh");
186 ssh_command.args(vec!["-T", SSH_HOST]);
187 trace!("Running command: {ssh_command:?}");
188 let output = &ssh_command.output().map_err(|source| Error::Io {
189 context: "running the SSH warmup command".to_string(),
190 source,
191 })?;
192
193 ensure_success(output, "Failed to run ssh warmup command".to_string())
194}
195
196fn update_repo(repo: &str, target_dir: &Path) -> Result<(), Error> {
199 let output = Command::new("git")
201 .current_dir(target_dir)
202 .args(vec!["reset", "--hard"])
203 .output()
204 .map_err(|source| Error::Io {
205 context: format!("resetting the package source repository \"{repo}\""),
206 source,
207 })?;
208
209 ensure_success(
210 &output,
211 format!("Resetting the package source repository \"{repo}\""),
212 )?;
213
214 let output = &Command::new("git")
215 .current_dir(target_dir)
216 .args(["pull", "--force"])
217 .output()
218 .map_err(|source| Error::Io {
219 context: format!("pulling the package source repository \"{repo}\""),
220 source,
221 })?;
222
223 ensure_success(
224 output,
225 format!("Pulling the package source repository \"{repo}\""),
226 )
227}
228
229fn clone_repo(mut repo: String, target_dir: &Path) -> Result<(), Error> {
231 for (to_replace, replace_with) in PACKAGE_REPO_RENAMES {
233 if repo == to_replace {
234 repo = replace_with.to_string();
235 }
236 }
237
238 repo = repo.replace("+", "plus");
241
242 let ssh_url = format!("{SSH_HOST}:{REPO_BASE_URL}/{repo}.git");
243
244 let output = &Command::new("git")
245 .arg("clone")
246 .arg(&ssh_url)
247 .arg(target_dir)
248 .output()
249 .map_err(|source| Error::Io {
250 context: format!("cloning the package source repository \"{repo}\""),
251 source,
252 })?;
253
254 ensure_success(
255 output,
256 format!("Cloning the package source repository \"{repo}\""),
257 )
258}