//! attic-queue: reads Nix store paths from a FIFO queue and uploads them (with their
//! closures) to an attic binary cache. Forked from zhaofengli/attic.
use std::{
    collections::{HashMap, HashSet},
    fs,
    ops::Deref,
    sync::Arc,
};

use anyhow::Result;
use app_queue::{AppQueue, Job, JobBuilder};
use async_trait::async_trait;
use attic::{
    cache::CacheName,
    nix_store::{NixStore, StorePath, StorePathHash, ValidPathInfo},
};
use attic_client::{api::ApiClient, cache::CacheRef, config::Config, push::upload_path};
use futures::future::join_all;
use indicatif::MultiProgress;
use log::{debug, info, warn};
use nix::{sys::stat::Mode, unistd::mkfifo};
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
use tokio::{
    io::{AsyncBufReadExt, BufReader},
    net::unix::pipe,
};
/// Lazily-initialised connection to the local Nix store.
static NIX_STORE: Lazy<Arc<NixStore>> = Lazy::new(|| Arc::new(NixStore::connect().unwrap()));

/// Name of the target attic cache.
static CACHE_NAME: Lazy<CacheName> = Lazy::new(|| CacheName::new("chir-rs".to_string()).unwrap());

/// Attic API client, built from the user's client config for the cache's default server.
static API: Lazy<ApiClient> = Lazy::new(|| {
    let config = Config::load().unwrap();
    let cache_ref = CacheRef::DefaultServer(CACHE_NAME.clone());
    let (_, server, _) = config.resolve_cache(&cache_ref).unwrap();
    ApiClient::from_server_config(server.clone()).unwrap()
});

/// Shared progress bar container for concurrent uploads.
static MULTI_PROGRESS: Lazy<MultiProgress> = Lazy::new(MultiProgress::new);
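/// Job that pre-processes a queued store path: it computes the path's closure, drops paths
/// already signed by cache.nixos.org, asks the attic server which of the remaining paths are
/// missing, and schedules an [`UploadPath`] job for each missing path.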
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
struct QueuedInput {
    path: StorePath,
}

#[typetag::serde]
#[async_trait]
impl Job for QueuedInput {
    async fn run(&mut self, queue: Arc<AppQueue>) -> Result<()> {
        debug!("Starting to preprocess path: {:?}", self.path);
        // Compute the full closure of the path, then include the path itself.
        let mut deps = NIX_STORE
            .compute_fs_closure(self.path.clone(), false, true, true)
            .await?;
        deps.push(self.path.clone());
        info!("Preprocess: {:?} has {} dependencies", self.path, deps.len());
        // Query path info for every dependency concurrently.
        let mut store_path_map: HashMap<StorePathHash, ValidPathInfo> = {
            let futures = deps
                .iter()
                .map(|path| {
                    let path = path.clone();
                    let path_hash = path.to_hash();
                    async move {
                        let path_info = NIX_STORE.query_path_info(path).await?;
                        Ok((path_hash, path_info))
                    }
                })
                .collect::<Vec<_>>();
            join_all(futures).await.into_iter().collect::<Result<_>>()?
        };
        info!(
            "Fetched path info for {:?} ({} paths)",
            self.path,
            deps.len()
        );
        // Drop paths that are already signed by the upstream cache.nixos.org cache.
        store_path_map.retain(|_, pi| {
            !pi.sigs
                .iter()
                .any(|sig| sig.starts_with("cache.nixos.org-1:"))
        });
        info!(
            "Non-upstream deps for {:?}: {}",
            self.path,
            store_path_map.len()
        );
        // Ask the attic server which of the remaining paths it does not have yet.
        let missing_path_hashes: HashSet<StorePathHash> = {
            let store_path_hashes = store_path_map.keys().map(|sph| sph.to_owned()).collect();
            let res = API
                .get_missing_paths(&CacheName::new("chir-rs".to_string())?, store_path_hashes)
                .await?;
            res.missing_paths.into_iter().collect()
        };
        store_path_map.retain(|sph, _| missing_path_hashes.contains(sph));
        info!("Pre-processed path: {:?}", self.path);
        // Schedule one upload job per missing path; the job ID deduplicates repeated requests
        // for the same path.
        for dep in store_path_map.values() {
            let dep = &dep.path;
            let job_id = format!("fetch_path_info:{:?}", dep.as_os_str());
            JobBuilder::new(UploadPath { path: dep.clone() })
                .id(job_id)
                .schedule(&queue)
                .await
                .ok();
        }
        Ok(())
    }

    fn is_fatal_error(&self, _: &anyhow::Error) -> bool {
        true
    }
}
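/// Job that uploads a single store path to the attic cache.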
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
struct UploadPath {
    path: StorePath,
}

#[typetag::serde]
#[async_trait]
impl Job for UploadPath {
    async fn run(&mut self, _: Arc<AppQueue>) -> Result<()> {
        // The path may have disappeared from the store since it was queued; skip it then.
        let path_info = match NIX_STORE.query_path_info(self.path.clone()).await {
            Ok(pi) => pi,
            Err(e) => {
                warn!("Path {:?} is not valid: {e:#?}. Skipping.", self.path);
                return Ok(());
            }
        };
        upload_path(
            path_info,
            Arc::clone(&*NIX_STORE),
            API.clone(),
            CACHE_NAME.deref(),
            MULTI_PROGRESS.clone(),
            false,
        )
        .await?;
        Ok(())
    }
}
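/// State of a queued path. Not referenced elsewhere in this file.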
#[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
enum PathState {
    Queued,
    InProgress,
}
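/// Reads store path names line by line from the FIFO at `$QUEUE_PATH`, resolves each to a
/// store path, and schedules a high-priority [`QueuedInput`] job for it.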
async fn enqueue_thread(queue: Arc<AppQueue>) -> Result<()> {
    let queue_path = std::env::var("QUEUE_PATH")?;
    // Recreate the FIFO so a stale file from a previous run does not interfere.
    fs::remove_file(&queue_path).ok();
    mkfifo(
        &queue_path[..],
        Mode::S_IRWXU | Mode::S_IWGRP | Mode::S_IWOTH,
    )?;
    // Open the FIFO read-write so the reader does not hit EOF when writers disconnect.
    let rx = pipe::OpenOptions::new()
        .read_write(true)
        .open_receiver(&queue_path)?;
    let mut lines = BufReader::new(rx).lines();
    while let Some(line) = lines.next_line().await? {
        info!("Parsed line: {line:?}");
        let root = match NIX_STORE.follow_store_path(line) {
            Ok(root) => root,
            Err(e) => {
                eprintln!("Error: {}", e);
                continue;
            }
        };
        JobBuilder::new(QueuedInput { path: root })
            .priority(1)
            .schedule(&queue)
            .await?;
    }
    Ok(())
}
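/// Entry point: initialises logging, opens the job database at `$DATABASE_PATH`, starts the
/// queue workers, and feeds the queue from the FIFO.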
#[tokio::main]
async fn main() -> Result<()> {
    tracing_subscriber::fmt::init();
    info!("Starting attic-queue");
    let db_path = std::env::var("DATABASE_PATH")?;
    let app_queue = AppQueue::new(db_path).await?;
    // Spawn the background workers that process queued jobs, then block on the FIFO reader.
    Arc::clone(&app_queue).run_job_workers_default();
    enqueue_thread(app_queue).await?;
    Ok(())
}