From 0730c0f4d63e490fe87e5b096bbeea60fed3423d Mon Sep 17 00:00:00 2001 From: Mikail Bagishov Date: Thu, 20 Aug 2020 14:54:13 +0300 Subject: [PATCH] ppc: Rename to pps and refactor old name: problem package compiling (not cool) new name: problem preparation system (or services, as you wish) (cooler) new architecture: CLI and server CLI starts server in background and sends RPC requests to the server. Server actually does stuff. In future it will be possible to start one server which will process requests from many clients. RPC protocol is based on HTTP POST requests and defined via Rust structs. In future, we should make consuming PPS api from other languages easier (e.g. provide JSON schema for types). Also, it's worth extracting RPC implementation into jjs-commons. --- Cargo.lock | 115 +++++++-- Cargo.toml | 7 +- k8s/jjs/templates/init.yaml | 3 + src/dist-builder/src/emit.rs | 22 +- src/dist-builder/src/main.rs | 2 +- src/invoker/src/lib.rs | 2 +- src/invoker/src/main.rs | 1 + src/ppc/Cargo.toml | 30 --- src/ppc/src/compile.rs | 227 ------------------ src/ppc/src/import.rs | 144 ----------- src/ppc/src/main.rs | 188 --------------- src/pps/Readme.md | 2 + src/pps/api/Cargo.toml | 14 ++ src/pps/api/src/compile_problem.rs | 35 +++ src/pps/api/src/import_problem.rs | 52 ++++ src/pps/api/src/lib.rs | 48 ++++ src/pps/cli/Cargo.toml | 19 ++ src/{ppc => pps/cli}/Dockerfile | 4 +- src/pps/cli/src/client_util.rs | 30 +++ src/pps/cli/src/compile.rs | 73 ++++++ src/pps/cli/src/import.rs | 88 +++++++ src/pps/cli/src/main.rs | 56 +++++ .../cli/src}/progress_notifier.rs | 0 src/pps/server/Cargo.toml | 32 +++ src/{ppc => pps/server}/src/command.rs | 29 ++- src/pps/server/src/compile.rs | 70 ++++++ src/{ppc => pps/server}/src/compile/build.rs | 0 .../server}/src/compile/builder.rs | 115 +++++---- src/pps/server/src/import.rs | 93 +++++++ .../server}/src/import/checker_tpl.cmake | 0 .../server}/src/import/contest_import.rs | 0 .../src/import/default_valuer_config.yaml | 0 src/{ppc 
=> pps/server}/src/import/gen.cmake | 0 .../server}/src/import/problem_importer.rs | 222 +++++++++++------ .../server}/src/import/solution.cmake | 0 .../server}/src/import/template.rs | 0 .../server}/src/import/valuer_cfg.pest | 0 .../server}/src/import/valuer_cfg.rs | 21 +- src/pps/server/src/lib.rs | 110 +++++++++ src/{ppc => pps/server}/src/manifest.rs | 0 src/toolkit/Dockerfile | 2 +- 41 files changed, 1095 insertions(+), 761 deletions(-) delete mode 100644 src/ppc/Cargo.toml delete mode 100644 src/ppc/src/compile.rs delete mode 100644 src/ppc/src/import.rs delete mode 100644 src/ppc/src/main.rs create mode 100644 src/pps/Readme.md create mode 100644 src/pps/api/Cargo.toml create mode 100644 src/pps/api/src/compile_problem.rs create mode 100644 src/pps/api/src/import_problem.rs create mode 100644 src/pps/api/src/lib.rs create mode 100644 src/pps/cli/Cargo.toml rename src/{ppc => pps/cli}/Dockerfile (78%) create mode 100644 src/pps/cli/src/client_util.rs create mode 100644 src/pps/cli/src/compile.rs create mode 100644 src/pps/cli/src/import.rs create mode 100644 src/pps/cli/src/main.rs rename src/{ppc/src/compile => pps/cli/src}/progress_notifier.rs (100%) create mode 100644 src/pps/server/Cargo.toml rename src/{ppc => pps/server}/src/command.rs (77%) create mode 100644 src/pps/server/src/compile.rs rename src/{ppc => pps/server}/src/compile/build.rs (100%) rename src/{ppc => pps/server}/src/compile/builder.rs (83%) create mode 100644 src/pps/server/src/import.rs rename src/{ppc => pps/server}/src/import/checker_tpl.cmake (100%) rename src/{ppc => pps/server}/src/import/contest_import.rs (100%) rename src/{ppc => pps/server}/src/import/default_valuer_config.yaml (100%) rename src/{ppc => pps/server}/src/import/gen.cmake (100%) rename src/{ppc => pps/server}/src/import/problem_importer.rs (60%) rename src/{ppc => pps/server}/src/import/solution.cmake (100%) rename src/{ppc => pps/server}/src/import/template.rs (100%) rename src/{ppc => 
pps/server}/src/import/valuer_cfg.pest (100%) rename src/{ppc => pps/server}/src/import/valuer_cfg.rs (88%) create mode 100644 src/pps/server/src/lib.rs rename src/{ppc => pps/server}/src/manifest.rs (100%) diff --git a/Cargo.lock b/Cargo.lock index a7893dab..87e6d117 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -360,7 +360,7 @@ dependencies = [ [[package]] name = "async-mpmc" version = "0.1.0" -source = "git+https://github.com/jjs-dev/commons#36141835e67d80a0078946077bc0053ba1d788cf" +source = "git+https://github.com/jjs-dev/commons#37012e3e385f1c50972d3007d0ab580faa30f868" dependencies = [ "tokio", "tracing", @@ -391,9 +391,9 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.36" +version = "0.1.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a265e3abeffdce30b2e26b7a11b222fe37c6067404001b434101457d0385eb92" +checksum = "6e1a4a2f97ce50c9d0282c1468816208588441492b40d813b2e0419c22c05e7f" dependencies = [ "proc-macro2 1.0.19", "quote 1.0.7", @@ -990,9 +990,9 @@ checksum = "134951f4028bdadb9b84baf4232681efbf277da25144b9b0ad65df75946c422b" [[package]] name = "either" -version = "1.5.3" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb1f6b1ce1c140482ea30ddd3335fc0024ac7ee112895426e0a629a6c20adfe3" +checksum = "cd56b59865bce947ac5958779cfa508f6c3b9497cc762b7e24a12d11ccde2c4f" [[package]] name = "encode_unicode" @@ -1739,9 +1739,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.74" +version = "0.2.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2f02823cf78b754822df5f7f268fb59822e7296276d3e069d8e8cb26a14bd10" +checksum = "755456fae044e6fa1ebbbd1b3e902ae19e73097ed4ed87bb79934a867c007bc3" [[package]] name = "libflate" @@ -2035,7 +2035,7 @@ dependencies = [ [[package]] name = "multiwake" version = "0.1.0" -source = 
"git+https://github.com/jjs-dev/commons#36141835e67d80a0078946077bc0053ba1d788cf" +source = "git+https://github.com/jjs-dev/commons#37012e3e385f1c50972d3007d0ab580faa30f868" [[package]] name = "native-tls" @@ -2436,29 +2436,65 @@ dependencies = [ ] [[package]] -name = "ppc" +name = "pps-api" +version = "0.1.0" +dependencies = [ + "anyhow", + "reqwest", + "rpc", + "serde", + "serde_json", + "tokio", + "tracing", +] + +[[package]] +name = "pps-cli" +version = "0.1.0" +dependencies = [ + "anyhow", + "clap 3.0.0-beta.1", + "pps-api", + "pps-server", + "rand 0.7.3", + "rpc", + "serde", + "serde_json", + "tokio", + "tracing", + "tracing-futures", + "util", +] + +[[package]] +name = "pps-server" version = "0.1.0" dependencies = [ "anyhow", "async-trait", + "either", "formatf", + "futures-util", "getrandom", "glob", "hex 0.4.2", + "hyper", "libc", - "multiwake", "pest", "pest_derive", "pom", + "pps-api", "roxmltree", + "rpc", "serde", "serde_json", "serde_yaml", - "structopt", "svaluer", "thiserror", "tokio", "toml", + "tracing", + "tracing-futures", ] [[package]] @@ -2585,7 +2621,7 @@ dependencies = [ [[package]] name = "puller" version = "0.1.0" -source = "git+https://github.com/jjs-dev/commons#36141835e67d80a0078946077bc0053ba1d788cf" +source = "git+https://github.com/jjs-dev/commons#37012e3e385f1c50972d3007d0ab580faa30f868" dependencies = [ "base64 0.12.3", "dkregistry", @@ -2910,13 +2946,29 @@ checksum = "cabe4fa914dec5870285fa7f71f602645da47c486e68486d2b4ceb4a343e90ac" [[package]] name = "roxmltree" -version = "0.10.1" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4129743af505f674602b97d02c8ebe526752de8811e21ecb2fb166fcf4afe157" +checksum = "17dfc6c39f846bfc7d2ec442ad12055d79608d501380789b965d22f9354451f2" dependencies = [ "xmlparser", ] +[[package]] +name = "rpc" +version = "0.1.0" +source = "git+https://github.com/jjs-dev/commons#37012e3e385f1c50972d3007d0ab580faa30f868" +dependencies = [ + "futures-util", + 
"hyper", + "reqwest", + "serde", + "serde_json", + "thiserror", + "tokio", + "tower-util", + "tracing", +] + [[package]] name = "rust-argon2" version = "0.7.0" @@ -3049,9 +3101,9 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.114" +version = "1.0.115" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5317f7588f0a5078ee60ef675ef96735a1442132dc645eb1d12c018620ed8cd3" +checksum = "e54c9a88f2da7238af84b5101443f0c0d0a3bbdc455e34a5c9497b1903ed55d5" dependencies = [ "serde_derive", ] @@ -3068,9 +3120,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.114" +version = "1.0.115" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0be94b04690fbaed37cddffc5c134bf537c8e3329d53e982fe04c374978f8e" +checksum = "609feed1d0a73cc36a0182a840a9b37b4a82f0b1150369f0536a9e3f2a31dc48" dependencies = [ "proc-macro2 1.0.19", "quote 1.0.7", @@ -3677,6 +3729,7 @@ dependencies = [ "signal-hook-registry", "slab", "tokio-macros", + "tracing", "winapi 0.3.9", ] @@ -3766,11 +3819,23 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e987b6bf443f4b5b3b6f38704195592cca41c5bb7aedd3c3693c7081f8289860" +[[package]] +name = "tower-util" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d1093c19826d33807c72511e68f73b4a0469a3f22c2bd5f7d5212178b4b89674" +dependencies = [ + "futures-core", + "futures-util", + "pin-project", + "tower-service", +] + [[package]] name = "tracing" -version = "0.1.18" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0aae59226cf195d8e74d4b34beae1859257efb4e5fed3f147d2dc2c7d372178" +checksum = "6d79ca061b032d6ce30c660fded31189ca0b9922bf483cd70759f13a2d86786c" dependencies = [ "cfg-if", "log", @@ -3780,9 +3845,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.9" 
+version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0693bf8d6f2bf22c690fc61a9d21ac69efdbb894a17ed596b9af0f01e64b84b" +checksum = "80e0ccfc3378da0cce270c946b676a376943f5cd16aeba64568e7939806f4ada" dependencies = [ "proc-macro2 1.0.19", "quote 1.0.7", @@ -3791,9 +3856,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.13" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d593f98af59ebc017c0648f0117525db358745a8894a8d684e185ba3f45954f9" +checksum = "db63662723c316b43ca36d833707cc93dff82a02ba3d7e354f342682cc8b3545" dependencies = [ "lazy_static", ] @@ -3804,6 +3869,8 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab7bb6f14721aa00656086e9335d363c5c8747bae02ebe32ea2c7dece5689b4c" dependencies = [ + "futures 0.3.5", + "futures-task", "pin-project", "tracing", ] diff --git a/Cargo.toml b/Cargo.toml index 3efb0063..d433987c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -35,6 +35,7 @@ opt-level = 0 [workspace] # TODO: add other as they are fixed -members=["src/devtool", "src/gen-api-client", "src/cli", "src/client", - "src/problem-loader", "src/invoker", - "src/dist-files-generator", "src/dist-builder", "src/ppc", "src/svaluer", "src/invoker-api"] +members=[ "src/devtool", "src/gen-api-client", "src/cli", "src/client", + "src/problem-loader", "src/invoker", "src/dist-files-generator", + "src/dist-builder", "src/svaluer", "src/invoker-api", "src/pps/api", + "src/pps/cli", "src/pps/server" ] diff --git a/k8s/jjs/templates/init.yaml b/k8s/jjs/templates/init.yaml index 45b919d2..75211887 100644 --- a/k8s/jjs/templates/init.yaml +++ b/k8s/jjs/templates/init.yaml @@ -114,6 +114,9 @@ spec: - name: shared mountPath: /shared command: ["/bin/bash", "/cmap/init_problems"] + env: + - name: RUST_LOG + value: info,pps_cli=trace,pps_server=trace,pps_api=trace - name: problems-upload image: "{{ .Values.image.repositoryPrefix 
}}toolkit:{{ .Values.image.tag }}" imagePullPolicy: {{ .Values.image.pullPolicy }} diff --git a/src/dist-builder/src/emit.rs b/src/dist-builder/src/emit.rs index 98b5bdf5..72542a76 100644 --- a/src/dist-builder/src/emit.rs +++ b/src/dist-builder/src/emit.rs @@ -5,12 +5,24 @@ use crate::{ Params, }; use anyhow::Context as _; -use std::{io::Write, path::Path, process::Command}; +use std::{ + io::Write, + path::{Path, PathBuf}, + process::Command, +}; use util::cmd::CommandExt; pub(crate) struct DockerEmitter; impl DockerEmitter { + fn package_name_to_path(pkg_name: &str) -> PathBuf { + if pkg_name.starts_with("pps-") { + pkg_name.replace('-', "/").into() + } else { + pkg_name.into() + } + } + fn emit_inner( params: &Params, docker_context: &Path, @@ -20,7 +32,13 @@ impl DockerEmitter { let mut cmd = Command::new(¶ms.cfg.build.tool_info.docker); cmd.arg("build"); cmd.arg("-f"); - cmd.arg(params.src.join("src").join(pkg_name).join("Dockerfile")); + cmd.arg( + params + .src + .join("src") + .join(Self::package_name_to_path(pkg_name)) + .join("Dockerfile"), + ); let tag = options .tag .clone() diff --git a/src/dist-builder/src/main.rs b/src/dist-builder/src/main.rs index 6e05a572..33eb985a 100644 --- a/src/dist-builder/src/main.rs +++ b/src/dist-builder/src/main.rs @@ -190,7 +190,7 @@ fn make_rust_package_list() -> Vec { //add("cleanup", "jjs-cleanup", Section::Tool); //add("envck", "jjs-env-check", Section::Tool); //add("setup", "jjs-setup", Section::Tool); - add("ppc", "jjs-ppc", Section::Tool); + add("pps-cli", "jjs-pps", Section::Tool); //add("userlist", "jjs-userlist", Section::Tool); add("cli", "jjs-cli", Section::Tool); add("invoker", "jjs-invoker", Section::Daemon); diff --git a/src/invoker/src/lib.rs b/src/invoker/src/lib.rs index 36391460..6b17d7f1 100644 --- a/src/invoker/src/lib.rs +++ b/src/invoker/src/lib.rs @@ -1,4 +1,4 @@ -#![feature(proc_macro_hygiene, type_alias_impl_trait)] +#![type_length_limit = "4323264"] pub mod api; pub mod config; pub mod 
controller; diff --git a/src/invoker/src/main.rs b/src/invoker/src/main.rs index 395d0ef2..de0f7aaa 100644 --- a/src/invoker/src/main.rs +++ b/src/invoker/src/main.rs @@ -1,3 +1,4 @@ +#![type_length_limit = "4323264"] use anyhow::Context; use invoker::controller::JudgeRequestAndCallbacks; use std::sync::Arc; diff --git a/src/ppc/Cargo.toml b/src/ppc/Cargo.toml deleted file mode 100644 index 00de4570..00000000 --- a/src/ppc/Cargo.toml +++ /dev/null @@ -1,30 +0,0 @@ -[package] -name = "ppc" -version = "0.1.0" -authors = ["Mikail Bagishov "] -edition = "2018" -description = """ -PPC is tool for prepararing problems and contests -""" - -[dependencies] -structopt = "0.3.13" -serde = { version = "1.0.106", features = ["derive"] } -toml = "0.5.6" -glob = "0.3.0" -libc = "0.2.68" -pom = {path = "../pom"} -serde_json = "1.0.51" -getrandom = "0.1.14" -hex = "0.4.2" -formatf = { git = "https://github.com/mikailbag/formatf" } -roxmltree = "0.10.1" -anyhow = "1.0.28" -svaluer = {path = "../svaluer"} -thiserror = "1.0.15" -pest = "2.1.3" -pest_derive = "2.1.0" -serde_yaml = "0.8.11" -tokio = { version = "0.2.18", features = ["process", "macros", "rt-threaded", "fs"] } -async-trait = "0.1.30" -multiwake = { git = "https://github.com/jjs-dev/commons" } diff --git a/src/ppc/src/compile.rs b/src/ppc/src/compile.rs deleted file mode 100644 index 48bcc9b8..00000000 --- a/src/ppc/src/compile.rs +++ /dev/null @@ -1,227 +0,0 @@ -//! This module implements compiling source package into invoker package -pub(crate) mod build; -mod builder; -mod progress_notifier; - -use anyhow::Context as _; -use std::{ - future::Future, - path::{Path, PathBuf}, - sync::{Arc, RwLock}, -}; -use tokio::sync::{mpsc, oneshot}; - -/// Represents single request data to CompilerService. 
-pub struct CompileSingleProblemArgs { - pub pkg_path: PathBuf, - pub out_path: PathBuf, - pub force: bool, -} - -struct ServiceRequest { - /// Request data, provided by user - args: CompileSingleProblemArgs, - /// channel which should receive response. - /// Dropping it does not cancel build. - chan: oneshot::Sender>, -} - -/// How many requests to service can be queued. -/// Since service processes each request in new task, and does not perform -/// any rate limiting, this setting does not limit amount of outstanding -/// requests, so this value can be pretty low and does not need configuration. -const CHANNEL_CAPACITY: usize = 16; - -/// Represents long-running background task, building problems on request. -/// When last `CompilerServiceClient` is dropped, service will stop automatically. -pub(crate) struct CompilerService { - /// State used when processing requests - data: ServiceData, - /// We receive requests from this channel - chan: mpsc::Receiver, - /// Notifies clients when service state changes - state_update_notify: multiwake::Sender, - /// Current service state - current_state: Arc>, -} - -#[derive(Clone)] -pub(crate) struct ServiceState { - /// true if service still accepts new requests - pub(crate) service_running: bool, - /// count of requests that are currently executed - pub(crate) in_flight_requests: usize, -} - -#[derive(Clone)] -struct ServiceData { - /// JJS installation directory (used to find JTL binaries) - jjs_dir: PathBuf, -} - -/// Handle for interacting with CompilerService -#[derive(Clone)] -pub(crate) struct CompilerServiceClient { - /// Channel used to send request to service - chan: Option>, - /// Used to wait for a service state change - state_update_notify: multiwake::Receiver, - /// Used to get current service state - current_state: Arc>, -} - -impl CompilerService { - pub(crate) async fn start() -> anyhow::Result { - let jjs_dir: PathBuf = std::env::var_os("JJS_PATH") - .context("JJS_PATH not set")? 
- .into(); - - let (tx, rx) = mpsc::channel(CHANNEL_CAPACITY); - let state = Arc::new(RwLock::new(ServiceState { - service_running: true, - in_flight_requests: 0, - })); - let (state_notify_tx, state_notify_rx) = multiwake::multiwake(); - let service = CompilerService { - chan: rx, - data: ServiceData { jjs_dir }, - current_state: state.clone(), - state_update_notify: state_notify_tx, - }; - tokio::task::spawn(async move { - if let Err(err) = service.serve().await { - eprintln!("Serve error: {:#}", err); - } - }); - Ok(CompilerServiceClient { - chan: Some(tx), - state_update_notify: state_notify_rx, - current_state: state, - }) - } - - /// Main service function, which executes clients' requests until - /// all senders are droppped. - async fn serve(mut self) -> anyhow::Result<()> { - while let Some(request) = self.chan.recv().await { - let data = self.data.clone(); - let state = self.current_state.clone(); - let notify = self.state_update_notify.clone(); - { - state.write().unwrap().in_flight_requests += 1; - notify.wake(); - } - tokio::task::spawn(async move { - let res = Self::compile_problem(data, request.args).await; - request.chan.send(res).ok(); - { - state.write().unwrap().in_flight_requests -= 1; - notify.wake(); - } - }); - } - - { - let mut state = self.current_state.write().unwrap(); - state.service_running = false; - } - self.state_update_notify.wake(); - - Ok(()) - } - - /// Executes single compilation request - compiled requested problem. - /// (I.e. 
ppc source package -> pom standalone package) - async fn compile_problem( - data: ServiceData, - args: CompileSingleProblemArgs, - ) -> anyhow::Result<()> { - if args.force { - std::fs::remove_dir_all(&args.out_path).ok(); - tokio::fs::create_dir_all(&args.out_path).await?; - } else { - crate::check_dir(&args.out_path, false /* TODO */)?; - } - let toplevel_manifest = args.pkg_path.join("problem.toml"); - let toplevel_manifest = std::fs::read_to_string(toplevel_manifest)?; - - let raw_problem_cfg: crate::manifest::RawProblem = - toml::from_str(&toplevel_manifest).expect("problem.toml parse error"); - let (problem_cfg, warnings) = raw_problem_cfg.postprocess()?; - - if !warnings.is_empty() { - eprintln!("{} warnings", warnings.len()); - for warn in warnings { - eprintln!("- {}", warn); - } - } - - let out_dir = args.out_path.canonicalize().expect("resolve out dir"); - let problem_dir = args - .pkg_path - .canonicalize() - .context("resolve problem dir")?; - - let builder = builder::ProblemBuilder { - cfg: &problem_cfg, - problem_dir: &problem_dir, - out_dir: &out_dir, - jtl_dir: &data.jjs_dir, - build_backend: &build::Pibs { - jjs_dir: Path::new(&data.jjs_dir), - }, - }; - builder.build().await - } -} - -impl CompilerServiceClient { - /// Asks CompilerService to compile specified problem. - /// If this client was closed, error is returned. - pub(crate) fn exec( - &self, - args: CompileSingleProblemArgs, - ) -> impl Future> + 'static { - let chan = self.chan.clone(); - - async move { - let mut sender = match &chan { - Some(chan) => chan.clone(), - None => anyhow::bail!("This client is closed"), - }; - let (done_tx, done_rx) = oneshot::channel(); - let req = ServiceRequest { - args, - chan: done_tx, - }; - if sender.send(req).await.is_err() { - anyhow::bail!("Task queue is full") - } - - match done_rx.await { - Ok(result) => result, - Err(_recv_error) => anyhow::bail!("Service is crashed or killed"), - } - } - } - - /// Shutdowns this client. 
If it was last non-closed client, service - /// will exit. All clones of this client will be in closed state too. - pub(crate) fn close(&mut self) { - self.chan.take(); - } - - /// Returns current service state. - /// If there are other clients, this state can be instantly outdated. - pub(crate) fn state(&self) -> ServiceState { - self.current_state.read().unwrap().clone() - } - - /// Waits until state has changed - pub(crate) async fn state_changed(&mut self) -> anyhow::Result<()> { - if self.state_update_notify.wait().await == multiwake::WaitResult::Closed { - anyhow::bail!("Service is crashed or killed") - } - Ok(()) - } -} diff --git a/src/ppc/src/import.rs b/src/ppc/src/import.rs deleted file mode 100644 index 59a6ef6f..00000000 --- a/src/ppc/src/import.rs +++ /dev/null @@ -1,144 +0,0 @@ -mod problem_importer; -mod template; -mod valuer_cfg; - -use anyhow::{bail, Context as _}; -use problem_importer::Importer; -use std::{ - collections::HashSet, - path::{Path, PathBuf}, -}; - -async fn import_one_problem( - services: &crate::Services, - src: &Path, - dest: &Path, - build: bool, - force: bool, -) -> anyhow::Result<()> { - let manifest_path = src.join("problem.xml"); - let manifest = std::fs::read_to_string(manifest_path).context("failed read problem.xml")?; - let doc = roxmltree::Document::parse(&manifest).context("parse error")?; - - let mut importer = Importer { - src: &src, - dest: &dest, - problem_cfg: Default::default(), - known_generators: HashSet::new(), - doc: doc.root_element(), - limits: pom::Limits::default(), - }; - - importer.run()?; - - let manifest_path = dest.join("problem.toml"); - let manifest_toml = - toml::Value::try_from(importer.problem_cfg.clone()).expect("serialize ppc config"); - let manifest_data = toml::ser::to_string_pretty(&manifest_toml).unwrap_or_else(|err| { - panic!( - "stringify ppc config: {}\n\nraw config: {:#?}", - err, &importer.problem_cfg - ) - }); - std::fs::write(manifest_path, manifest_data).expect("write ppc 
manifest"); - if build { - println!("Building problem {}", &importer.problem_cfg.name); - let problems_dir: PathBuf = std::env::var("JJS_DATA")?.into(); - let out_path = problems_dir - .join("var/problems") - .join(&importer.problem_cfg.name); - if force { - std::fs::remove_dir_all(&out_path).ok(); - } - std::fs::create_dir(&out_path)?; - let dest = dest.to_path_buf(); - crate::run_in_background(services.compiler.exec( - crate::compile::CompileSingleProblemArgs { - pkg_path: dest, - out_path, - force, - }, - )); - } - Ok(()) -} - -enum ImportKind { - Problem, - Contest, -} - -fn detect_import_kind(path: &Path) -> anyhow::Result { - if !path.exists() { - bail!("path {} does not exists", path.display()); - } - - if path.join("problem.xml").exists() { - return Ok(ImportKind::Problem); - } - if path.join("contest.xml").exists() { - return Ok(ImportKind::Contest); - } - - bail!("unknown src") -} - -pub(crate) async fn exec( - services: &crate::Services, - args: crate::args::ImportArgs, -) -> anyhow::Result<()> { - if args.force { - std::fs::remove_dir_all(&args.out_path).ok(); - std::fs::create_dir(&args.out_path).context("create out dir")?; - } else { - crate::check_dir(&PathBuf::from(&args.out_path), false /* TODO */)?; - } - - let src = Path::new(&args.in_path); - let dest = Path::new(&args.out_path); - let kind = detect_import_kind(src).context("failed to detect import operation kind")?; - match kind { - ImportKind::Problem => { - import_one_problem(services, src, dest, args.build, args.force).await?; - } - ImportKind::Contest => { - println!("Importing contest"); - println!("Importing problems"); - let items = src.join("problems").read_dir()?; - for item in items { - let item = item?; - let problem_name = item.file_name(); - println!( - "--- Importing problem {} ---", - problem_name.to_string_lossy() - ); - let problem_dir = item.path(); - let target_dir = dest.join("problems").join(&problem_name); - std::fs::create_dir_all(&target_dir)?; - 
import_one_problem(services, &problem_dir, &target_dir, args.build, args.force) - .await?; - } - if args.update_cfg { - anyhow::bail!("TODO"); - /* - let contest_name = args - .contest_name - .as_ref() - .ok_or_else(|| anyhow::anyhow!("missing --contest-name"))?; - let contest_config = contest_import::import(&src.join("contest.xml"), contest_name) - .context("import contest config")?; - let jjs_data_dir = std::env::var("JJS_DATA").context("JJS_DATA missing")?; - let path = PathBuf::from(jjs_data_dir) - .join("etc/objects/contests") - .join(format!("{}.yaml", contest_name)); - if path.exists() && !args.force { - anyhow::bail!("path {} already exists", path.display()); - } - let contest_config = serde_yaml::to_string(&contest_config)?; - std::fs::write(path, contest_config)?; - */ - } - } - } - Ok(()) -} diff --git a/src/ppc/src/main.rs b/src/ppc/src/main.rs deleted file mode 100644 index 77beebc0..00000000 --- a/src/ppc/src/main.rs +++ /dev/null @@ -1,188 +0,0 @@ -#![feature(is_sorted)] -#![allow(clippy::needless_lifetimes)] - -mod command; -mod compile; -mod import; -mod manifest; - -mod args { - use std::path::PathBuf; - use structopt::StructOpt; - - #[derive(Debug, StructOpt)] - pub struct CompileArgs { - /// Path to problem package root - #[structopt(long = "pkg", short = "P")] - pub pkg_path: Vec, - /// Output path - #[structopt(long = "out", short = "O")] - pub out_path: Vec, - /// Upload compiled packages - #[structopt(long, short = "u")] - pub upload: bool, - /// Rewrite dir - #[structopt(long, short = "F")] - pub force: bool, - } - - #[derive(StructOpt)] - pub struct ImportArgs { - /// Path to package being imported - #[structopt(long = "in", short = "I")] - pub in_path: String, - /// Out path (will contain ppc package) - #[structopt(long = "out", short = "O")] - pub out_path: String, - /// Rewrite dir - #[structopt(long, short = "F")] - pub force: bool, - /// Write contest config to jjs data_dir. 
- /// This option can only be used when importing contest - #[structopt(long, short = "C")] - pub update_cfg: bool, - /// Imported contest name - /// This option can only be used when importing contest - #[structopt(long, short = "N")] - pub contest_name: Option, - /// Build imported problems and install them to jjs data_dir - #[structopt(long, short = "B")] - pub build: bool, - } - - #[derive(StructOpt)] - #[structopt(author, about)] - pub enum Args { - Compile(CompileArgs), - Import(ImportArgs), - } -} - -use anyhow::Context as _; -use args::Args; -use std::{future::Future, path::Path, process::Stdio}; - -fn check_dir(path: &Path, allow_nonempty: bool) -> anyhow::Result<()> { - if !path.exists() { - anyhow::bail!("error: path {} not exists", path.display()); - } - if !path.is_dir() { - anyhow::bail!("error: path {} is not directory", path.display()); - } - if !allow_nonempty && path.read_dir().unwrap().next().is_some() { - anyhow::bail!("error: dir {} is not empty", path.display()); - } - Ok(()) -} - -#[cfg(target_os = "linux")] -fn tune_linux() -> anyhow::Result<()> { - let mut current_limit = libc::rlimit { - rlim_cur: 0, - rlim_max: 0, - }; - unsafe { - if libc::prlimit(0, libc::RLIMIT_STACK, std::ptr::null(), &mut current_limit) != 0 { - anyhow::bail!("get current RLIMIT_STACK"); - } - } - let new_limit = libc::rlimit { - rlim_cur: current_limit.rlim_max, - rlim_max: current_limit.rlim_max, - }; - unsafe { - if libc::prlimit(0, libc::RLIMIT_STACK, &new_limit, std::ptr::null_mut()) != 0 { - anyhow::bail!("update RLIMIT_STACK"); - } - } - - Ok(()) -} - -fn tune_resourece_limits() -> anyhow::Result<()> { - #[cfg(target_os = "linux")] - tune_linux()?; - - Ok(()) -} - -/// The most "interesting" functionality of ppc is contained in services, -/// following request-response pattern. It will simplify further daemon mode. -/// `Services` struct manages all there services. -/// # Drop -/// This struct must be dropped using `close` method for correctness. 
-struct Services { - pub compiler: compile::CompilerServiceClient, -} - -impl Services { - async fn new() -> anyhow::Result { - let compiler = compile::CompilerService::start().await?; - Ok(Self { compiler }) - } - - async fn shutdown(mut self) -> anyhow::Result<()> { - self.compiler.close(); - loop { - let state = self.compiler.state(); - if !state.service_running && state.in_flight_requests == 0 { - break; - } - self.compiler.state_changed().await?; - } - std::mem::forget(self); - Ok(()) - } -} - -impl Drop for Services { - fn drop(&mut self) { - if std::thread::panicking() { - // double panic is not cool - return; - } - panic!("ppc::Services must be consumed using shutdown()") - } -} - -fn run_in_background(fut: impl Future> + Send + 'static) { - tokio::task::spawn(async move { - if let Err(err) = fut.await { - eprintln!("Error: {:#}", err); - } - }); -} - -#[tokio::main] -async fn main() -> anyhow::Result<()> { - use structopt::StructOpt; - tune_resourece_limits()?; - let args = Args::from_args(); - let services = Services::new().await?; - let res = process_args(args, &services).await; - services.shutdown().await.context("finalization error")?; - res?; - Ok(()) -} - -async fn process_args(args: Args, services: &Services) -> anyhow::Result<()> { - match args { - Args::Compile(compile_args) => { - if compile_args.out_path.len() != compile_args.pkg_path.len() { - anyhow::bail!("count(--pkg) != count(--out)"); - } - for (out_path, pkg_path) in compile_args.out_path.iter().zip(&compile_args.pkg_path) { - let args = compile::CompileSingleProblemArgs { - out_path: out_path.clone(), - pkg_path: pkg_path.clone(), - force: compile_args.force, - }; - crate::run_in_background(services.compiler.exec(args)); - } - } - Args::Import(import_args) => { - import::exec(&services, import_args).await?; - } - } - Ok(()) -} diff --git a/src/pps/Readme.md b/src/pps/Readme.md new file mode 100644 index 00000000..adbae55e --- /dev/null +++ b/src/pps/Readme.md @@ -0,0 +1,2 @@ +# JJS 
Problem preparation system (PPS) +PPS goal is to provide next-generation problem preparation experience diff --git a/src/pps/api/Cargo.toml b/src/pps/api/Cargo.toml new file mode 100644 index 00000000..80ccd5d6 --- /dev/null +++ b/src/pps/api/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "pps-api" +version = "0.1.0" +authors = ["Mikail Bagishov "] +edition = "2018" + +[dependencies] +serde = { version = "1.0.115", features = ["derive"] } +reqwest = "0.10.7" +anyhow = "1.0.32" +serde_json = "1.0.57" +tokio = { version = "0.2.22", features = ["sync"] } +tracing = "0.1.19" +rpc = { git = "https://github.com/jjs-dev/commons" } diff --git a/src/pps/api/src/compile_problem.rs b/src/pps/api/src/compile_problem.rs new file mode 100644 index 00000000..ce4808fc --- /dev/null +++ b/src/pps/api/src/compile_problem.rs @@ -0,0 +1,35 @@ +//! Specifies that problem contained in workspace +//! should be compiled +use serde::{Deserialize, Serialize}; +use std::path::PathBuf; + +#[derive(Serialize, Deserialize)] +pub struct Request { + /// Path to problem source directory + pub problem_path: PathBuf, + /// Where to put compiled package + pub out_path: PathBuf, + /// Ignore existing files in out_path + pub force: bool, +} + +#[derive(Serialize, Deserialize)] +pub enum Update { + /// Contains some warnings that should be displayed to used. + /// Appears at most once. + Warnings(Vec), + /// Solution with given name is being built + BuildSolution(String), + /// Test generator with given name is being built + BuildTestgen(String), + /// Checker building started + BuildChecker, + /// Test generation started. `count` tests will be processed. + /// Appears at most once before `GenerateTest` updates. + GenerateTests { count: usize }, + /// Test `test_id` is being generated. Total test count is `count`. + /// `test_id`s are in range 1..=`count`. 
It is gu + GenerateTest { test_id: usize }, + /// Valuer config is being copied + CopyValuerConfig, +} diff --git a/src/pps/api/src/import_problem.rs b/src/pps/api/src/import_problem.rs new file mode 100644 index 00000000..2204f061 --- /dev/null +++ b/src/pps/api/src/import_problem.rs @@ -0,0 +1,52 @@ +//! Import problem from some other format +use serde::{Deserialize, Serialize}; +use std::path::PathBuf; +#[derive(Serialize, Deserialize)] +pub struct Request { + /// this path specifies file or files that should be imported + pub src_path: PathBuf, + /// where to put generated problem source + pub out_path: PathBuf, + /// do not check that dest is empty + pub force: bool, +} + +#[derive(Serialize, Deserialize)] +pub enum Update { + /// Contains one property of discovered problem. + /// Each `property_name` will be reported at most once. + Property { + property_name: PropertyName, + property_value: String, + }, + /// Contains one warning. May appear multiple times. + Warning(String), + /// Started importing checker + ImportChecker, + /// Started importing tests + ImportTests, + /// Finished importing tests. `count` tests imported. + ImportTestsDone { count: usize }, + /// Started importing solutions + ImportSolutions, + /// Started importing solution with specific name + ImportSolution(String), + /// Valuer config is detected and will be imported + ImportValuerConfig, + /// Valuer config was not found, default will be used + DefaultValuerConfig, +} + +#[derive(Serialize, Deserialize)] +pub enum PropertyName { + /// Value is time limit in milliseconds. + TimeLimit, + /// Value is memory limit in bytes. + MemoryLimit, + /// Value is printf-style pattern of input files. + InputPathPattern, + /// Value is printf-style pattern of output files. + OutputPathPattern, + /// Value is problem title. 
+ ProblemTitle, +} diff --git a/src/pps/api/src/lib.rs b/src/pps/api/src/lib.rs new file mode 100644 index 00000000..bf97deca --- /dev/null +++ b/src/pps/api/src/lib.rs @@ -0,0 +1,48 @@ +//! All PPS apis. +//! +//! All paths are relative to workspace root. +pub mod compile_problem; +pub mod import_problem; + +use rpc::Route; +use std::convert::Infallible; +pub struct CompileProblem(Infallible); + +impl Route for CompileProblem { + type Request = rpc::Unary; + type Response = rpc::Streaming; + + const ENDPOINT: &'static str = "/problems/compile"; +} + +pub struct ImportProblem(Infallible); + +impl Route for ImportProblem { + type Request = rpc::Unary; + type Response = rpc::Streaming; + + const ENDPOINT: &'static str = "/problems/import"; +} + +/// Contains possible error or success +#[derive(serde::Serialize, serde::Deserialize, Debug)] +#[must_use = "this is Result in fact"] +pub struct SimpleFinish(pub Result<(), StringError>); + +impl From> for SimpleFinish { + fn from(r: anyhow::Result<()>) -> Self { + Self(r.map_err(|e| StringError(format!("{:#}", e)))) + } +} + +#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[serde(transparent)] +pub struct StringError(pub String); + +impl std::fmt::Display for StringError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} + +impl std::error::Error for StringError {} diff --git a/src/pps/cli/Cargo.toml b/src/pps/cli/Cargo.toml new file mode 100644 index 00000000..c90f2c95 --- /dev/null +++ b/src/pps/cli/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "pps-cli" +version = "0.1.0" +authors = ["Mikail Bagishov "] +edition = "2018" + +[dependencies] +serde = { version = "1.0.106", features = ["derive"] } +serde_json = "1.0.51" +anyhow = "1.0.28" +tokio = { version = "0.2.18", features = ["process", "macros", "rt-threaded", "fs"] } +clap = "3.0.0-beta.1" +pps-api = { path = "../api" } +pps-server = { path = "../server" } +rand = "0.7.3" +util = { path = "../../util" } +tracing 
= "0.1.19" +tracing-futures = "0.2.4" +rpc = { git = "https://github.com/jjs-dev/commons" } diff --git a/src/ppc/Dockerfile b/src/pps/cli/Dockerfile similarity index 78% rename from src/ppc/Dockerfile rename to src/pps/cli/Dockerfile index 12a12b7b..209cf825 100644 --- a/src/ppc/Dockerfile +++ b/src/pps/cli/Dockerfile @@ -2,6 +2,6 @@ FROM debian:stable-slim # TODO: use rustls RUN apt-get update -y && apt-get install -y libssl-dev ENV JJS_AUTH_DATA=/auth/authdata.yaml JJS_PATH=/jtl -COPY jjs-ppc /jjs-ppc +COPY jjs-pps /jjs-pps VOLUME ["/auth"] -ENTRYPOINT ["/jjs-ppc"] +ENTRYPOINT ["/jjs-pps"] diff --git a/src/pps/cli/src/client_util.rs b/src/pps/cli/src/client_util.rs new file mode 100644 index 00000000..b562018d --- /dev/null +++ b/src/pps/cli/src/client_util.rs @@ -0,0 +1,30 @@ +// To start server, we need to know some free port. +// Even if there is a way to get this information, it would +// suffer from race conditions. +// That's why, we simply select random port and try using it. +// 20 iterations give negligible probability of failure. 
+const BIND_ATTEMPTS: usize = 20; + +#[tracing::instrument] +pub(crate) async fn create_server( + cancel: tokio::sync::CancellationToken, +) -> anyhow::Result<(tokio::sync::oneshot::Receiver<()>, rpc::Client)> { + // TODO provide way to customize port or port range + tracing::info!("launching server"); + let mut last_error = None; + for _ in 0..BIND_ATTEMPTS { + let port: u16 = rand::random(); + match pps_server::serve(port, cancel.clone()).await { + Ok(rx) => { + let endpoint = format!("http://127.0.0.1:{}", port); + let client = rpc::Client::new(rpc::ReqwestEngine::new(), endpoint); + return Ok((rx, client)); + } + Err(err) => { + tracing::warn!(error=?err, "bind attempt unsuccessful"); + last_error = Some(err) + } + } + } + Err(last_error.expect("BIND_ATTEMPTS != 0")) +} diff --git a/src/pps/cli/src/compile.rs b/src/pps/cli/src/compile.rs new file mode 100644 index 00000000..b3d884e7 --- /dev/null +++ b/src/pps/cli/src/compile.rs @@ -0,0 +1,73 @@ +use anyhow::Context as _; +use pps_api::compile_problem::Update; +use std::path::PathBuf; + +#[derive(Debug, clap::Clap)] +pub struct CompileArgs { + /// Path to problem package root + #[clap(long = "pkg", short = "P")] + pub pkg_path: Vec, + /// Output path + #[clap(long = "out", short = "O")] + pub out_path: Vec, + /// Rewrite dir + #[clap(long, short = "F")] + pub force: bool, +} + +#[tracing::instrument(skip(client, compile_args))] +pub async fn exec(client: &mut rpc::Client, compile_args: CompileArgs) -> anyhow::Result<()> { + if compile_args.out_path.len() != compile_args.pkg_path.len() { + anyhow::bail!("count(--pkg) != count(--out)"); + } + for (out_path, pkg_path) in compile_args.out_path.iter().zip(&compile_args.pkg_path) { + let req = pps_api::compile_problem::Request { + out_path: out_path.clone(), + problem_path: pkg_path.clone(), + force: compile_args.force, + }; + let (tx, mut resp) = client + .start::() + .await + .context("failed to start RPC call")?; + tx.send(req).await?; + let mut notifier = None; + 
while let Some(upd) = resp.next_event().await? { + match upd { + Update::Warnings(warnings) => { + if !warnings.is_empty() { + eprintln!("{} warnings", warnings.len()); + for warn in warnings { + eprintln!("- {}", warn); + } + } + } + Update::BuildSolution(solution_name) => { + println!("Building solution {}", &solution_name); + } + Update::BuildTestgen(testgen_name) => { + println!("Building generator {}", testgen_name); + } + Update::BuildChecker => { + println!("Building checker"); + } + Update::GenerateTests { count } => { + notifier = Some(crate::progress_notifier::Notifier::new(count)); + } + Update::GenerateTest { test_id } => { + notifier + .as_mut() + .expect("GenerateTest received before GenerateTests") + .maybe_notify(test_id); + } + Update::CopyValuerConfig => { + println!("Valuer config"); + } + } + } + if let Err(err) = resp.finish().await?.0 { + anyhow::bail!("Failed to build problem: {}", err) + } + } + Ok(()) +} diff --git a/src/pps/cli/src/import.rs b/src/pps/cli/src/import.rs new file mode 100644 index 00000000..b2c31eb0 --- /dev/null +++ b/src/pps/cli/src/import.rs @@ -0,0 +1,88 @@ +use anyhow::Context as _; +use pps_api::import_problem::{PropertyName, Update}; +use std::path::{Path, PathBuf}; + +#[derive(clap::Clap, Debug)] +pub struct ImportArgs { + /// Path to package being imported + #[clap(long = "in", short = "I")] + pub in_path: PathBuf, + /// Out path (will contain ppc package) + #[clap(long = "out", short = "O")] + pub out_path: PathBuf, + /// Rewrite dir + #[clap(long, short = "F")] + pub force: bool, + /// Imported contest name + /// This option can only be used when importing contest + #[clap(long, short = "N")] + pub contest_name: Option, +} + +async fn import_one_problem( + client: &mut rpc::Client, + src: &Path, + dest: &Path, + force: bool, +) -> anyhow::Result<()> { + let import_req = pps_api::import_problem::Request { + src_path: src.to_path_buf(), + out_path: dest.to_path_buf(), + force, + }; + let (tx, mut import) = 
client.start::().await?; + tx.send(import_req).await?; + while let Some(update) = import.next_event().await? { + match update { + Update::Property { + property_name, + property_value, + } => match property_name { + PropertyName::TimeLimit => println!("Time limit: {} ms", property_value), + PropertyName::MemoryLimit => { + let ml = property_value.parse::()?; + println!("Memory limit: {} bytes ({} MiBs)", ml, ml / (1 << 20)); + } + PropertyName::InputPathPattern => { + println!("Test input file path pattern: {}", property_value) + } + PropertyName::OutputPathPattern => { + println!("Test output file path pattern: {}", property_value) + } + PropertyName::ProblemTitle => println!("Problem title: {}", property_value), + }, + Update::Warning(warning) => eprintln!("warning: {}", warning), + Update::ImportChecker => println!("Importing checker"), + Update::ImportTests => println!("Importing tests"), + Update::ImportTestsDone { count } => println!("{} tests imported", count), + Update::ImportSolutions => println!("Importing solutions"), + Update::ImportSolution(solution) => println!("Importing solution '{}'", solution), + Update::ImportValuerConfig => println!("Importing valuer config"), + Update::DefaultValuerConfig => println!("Defaulting valuer config"), + } + } + import.finish().await?.0.context("build failure")?; + + println!("Import successful"); + + Ok(()) +} + +#[tracing::instrument(skip(client, args))] +pub(crate) async fn exec(client: &mut rpc::Client, args: ImportArgs) -> anyhow::Result<()> { + if args.force { + std::fs::remove_dir_all(&args.out_path).ok(); + std::fs::create_dir(&args.out_path).context("create out dir")?; + } else { + crate::check_dir(&PathBuf::from(&args.out_path), false /* TODO */)?; + } + + let src = &args.in_path; + let dest = &args.out_path; + + import_one_problem(client, src, dest, args.force).await?; + + // TODO support importing contests + + Ok(()) +} diff --git a/src/pps/cli/src/main.rs b/src/pps/cli/src/main.rs new file mode 100644 
index 00000000..3d686a6c --- /dev/null +++ b/src/pps/cli/src/main.rs @@ -0,0 +1,56 @@ +#![feature(is_sorted)] +#![allow(clippy::needless_lifetimes)] + +mod client_util; +mod compile; +mod import; +mod progress_notifier; + +#[derive(clap::Clap, Debug)] +#[clap(author, about)] +pub enum Args { + Compile(compile::CompileArgs), + Import(import::ImportArgs), +} + +use anyhow::Context as _; +use std::path::Path; + +fn check_dir(path: &Path, allow_nonempty: bool) -> anyhow::Result<()> { + if !path.exists() { + anyhow::bail!("error: path {} not exists", path.display()); + } + if !path.is_dir() { + anyhow::bail!("error: path {} is not directory", path.display()); + } + if !allow_nonempty && path.read_dir().unwrap().next().is_some() { + anyhow::bail!("error: dir {} is not empty", path.display()); + } + Ok(()) +} + +#[tokio::main] +async fn main() -> anyhow::Result<()> { + use clap::Clap; + util::log::setup(); + let args = Args::parse(); + tracing::info!("starting new server in background"); + let cancel = tokio::sync::CancellationToken::new(); + let (server_done_rx, mut client) = client_util::create_server(cancel.clone()).await?; + process_args(args, &mut client) + .await + .context("failed to process args")?; + cancel.cancel(); + tracing::info!("waiting for server shutdown"); + server_done_rx.await.ok(); + Ok(()) +} + +#[tracing::instrument(skip(args, client))] +async fn process_args(args: Args, client: &mut rpc::Client) -> anyhow::Result<()> { + tracing::info!(args=?args, "executing requested command"); + match args { + Args::Compile(compile_args) => compile::exec(client, compile_args).await, + Args::Import(import_args) => import::exec(client, import_args).await, + } +} diff --git a/src/ppc/src/compile/progress_notifier.rs b/src/pps/cli/src/progress_notifier.rs similarity index 100% rename from src/ppc/src/compile/progress_notifier.rs rename to src/pps/cli/src/progress_notifier.rs diff --git a/src/pps/server/Cargo.toml b/src/pps/server/Cargo.toml new file mode 100644 index 
00000000..c68d3904 --- /dev/null +++ b/src/pps/server/Cargo.toml @@ -0,0 +1,32 @@ +[package] +name = "pps-server" +version = "0.1.0" +authors = ["Mikail Bagishov "] +edition = "2018" + +[dependencies] +tokio = { version = "0.2.22", features = ["macros", "tracing"] } +serde = "1.0.115" +serde_json = "1.0.57" +anyhow = "1.0.32" +toml = "0.5.6" +thiserror = "1.0.20" +async-trait = "0.1.38" +getrandom = "0.1.14" +glob = "0.3.0" +pom = { path = "../../pom" } +libc = "0.2.76" +roxmltree = "0.13.0" +serde_yaml = "0.8.13" +pest = "2.1.3" +pest_derive = "2.1.0" +formatf = { git = "https://github.com/mikailbag/formatf" } +svaluer = { path = "../../svaluer" } +hex = "0.4.2" +pps-api = { path = "../api" } +hyper = "0.13.7" +tracing = "0.1.19" +tracing-futures = { version = "0.2.4", features = ["futures-03"] } +either = "1.6.0" +rpc = { git = "https://github.com/jjs-dev/commons" } +futures-util = "0.3.5" diff --git a/src/ppc/src/command.rs b/src/pps/server/src/command.rs similarity index 77% rename from src/ppc/src/command.rs rename to src/pps/server/src/command.rs index 7cd0cbcf..9572e331 100644 --- a/src/ppc/src/command.rs +++ b/src/pps/server/src/command.rs @@ -1,4 +1,5 @@ // TODO: split all this stuff to separate library +use anyhow::Context; use serde::{Deserialize, Serialize}; use std::ffi::{OsStr, OsString}; @@ -11,8 +12,8 @@ pub struct Command { } impl Command { - pub fn to_std_command(&self) -> std::process::Command { - let mut cmd = std::process::Command::new(&self.exe); + pub fn to_tokio_command(&self) -> tokio::process::Command { + let mut cmd = tokio::process::Command::new(&self.exe); cmd.args(self.argv.iter()); if let Some(cwd) = &self.cwd { cmd.current_dir(cwd); @@ -37,13 +38,13 @@ impl Command { out } - pub fn run_quiet(&mut self) -> std::process::Output { + pub async fn run_quiet(&mut self) -> anyhow::Result { use std::os::unix::process::ExitStatusExt; - let mut s = self.to_std_command(); - let out = s.output().expect("couldn't spawn"); + let mut s = 
self.to_tokio_command(); + let out = s.output().await.context("couldn't spawn")?; let status = out.status; if status.success() { - return out; + return Ok(out); } let exit_code = if status.code().is_some() { @@ -51,15 +52,13 @@ impl Command { } else { format!("signaled: {}", status.signal().unwrap()) }; - eprintln!( - "Child did not finished successfully (exit code {})", - exit_code - ); - - eprintln!("command: `{}`", self); - eprintln!("child stdout:\n{}", String::from_utf8_lossy(&out.stdout)); - eprintln!("child stderr:\n{}", String::from_utf8_lossy(&out.stderr)); - std::process::exit(1); + anyhow::bail!( + "child process did not finished successfully (exit code {})\ncommand: `{}`\nchild stdout:\n{}\nchild stderr:\n{}", + exit_code, + self, + String::from_utf8_lossy(&out.stdout), + String::from_utf8_lossy(&out.stderr) + ) } } diff --git a/src/pps/server/src/compile.rs b/src/pps/server/src/compile.rs new file mode 100644 index 00000000..fbb17852 --- /dev/null +++ b/src/pps/server/src/compile.rs @@ -0,0 +1,70 @@ +//! This module implements compiling source package into invoker package +pub(crate) mod build; +mod builder; + +use anyhow::Context as _; +use pps_api::{ + compile_problem::{Request, Update}, + SimpleFinish, +}; +use std::{path::Path, sync::Arc}; + +impl rpc::Handler for crate::Service { + type Error = anyhow::Error; + type Fut = futures_util::future::BoxFuture<'static, Result<(), Self::Error>>; + + fn handle( + self, + rx: rpc::UnaryRx, + mut tx: rpc::StreamingTx, + ) -> Self::Fut { + Box::pin(async move { + let args = rx.recv().await?; + let response = exec_compile_request(args, self.0, &mut tx).await; + tx.finish(response.into()).await?; + Ok(()) + }) + } +} + +/// This function actually implements request processing. +/// It's return value is used as response. 
+pub(crate) async fn exec_compile_request( + args: Request, + data: Arc, + tx: &mut rpc::StreamingTx, +) -> anyhow::Result<()> { + if args.force { + std::fs::remove_dir_all(&args.out_path).ok(); + tokio::fs::create_dir_all(&args.out_path).await?; + } else { + crate::check_dir(&args.out_path, false /* TODO */)?; + } + let toplevel_manifest = args.problem_path.join("problem.toml"); + let toplevel_manifest = std::fs::read_to_string(toplevel_manifest)?; + + let raw_problem_cfg: crate::manifest::RawProblem = + toml::from_str(&toplevel_manifest).context("problem.toml parse error")?; + let (problem_cfg, warnings) = raw_problem_cfg.postprocess()?; + + tx.send_event(Update::Warnings(warnings)).await?; + + let out_dir = args.out_path.canonicalize().context("resolve out dir")?; + let problem_dir = args + .problem_path + .canonicalize() + .context("resolve problem dir")?; + + let mut builder = builder::ProblemBuilder { + cfg: &problem_cfg, + problem_dir: &problem_dir, + out_dir: &out_dir, + jtl_dir: &data.jjs_dir, + build_backend: &build::Pibs { + jjs_dir: Path::new(&data.jjs_dir), + }, + tx, + }; + builder.build().await?; + Ok(()) +} diff --git a/src/ppc/src/compile/build.rs b/src/pps/server/src/compile/build.rs similarity index 100% rename from src/ppc/src/compile/build.rs rename to src/pps/server/src/compile/build.rs diff --git a/src/ppc/src/compile/builder.rs b/src/pps/server/src/compile/builder.rs similarity index 83% rename from src/ppc/src/compile/builder.rs rename to src/pps/server/src/compile/builder.rs index 3077b32e..d00fc6c2 100644 --- a/src/ppc/src/compile/builder.rs +++ b/src/pps/server/src/compile/builder.rs @@ -1,15 +1,14 @@ use crate::{ command::Command, - compile::{ - build::{BuildBackend, Task, TaskError}, - progress_notifier::Notifier, - }, + compile::build::{BuildBackend, Task, TaskError}, }; use anyhow::Context as _; use pom::{FileRef, FileRefRoot, Limits}; +use pps_api::compile_problem::Update; use std::{ collections::HashMap, - os::unix::{io::IntoRawFd, 
process::CommandExt}, + fmt::Write, + os::unix::io::IntoRawFd, path::{Path, PathBuf}, }; @@ -26,6 +25,8 @@ pub(crate) struct ProblemBuilder<'a> { pub(crate) jtl_dir: &'a Path, /// Used to execute build tasks (e.g. builds checker or solution) pub(crate) build_backend: &'a dyn BuildBackend, + /// Used to return live building progress + pub(crate) tx: &'a mut rpc::StreamingTx, } /// Fills given buffer with random hex string @@ -87,13 +88,29 @@ impl<'a> ProblemBuilder<'a> { match self.build_backend.process_task(task.clone()).await { Ok(cmd) => Ok(cmd.command), Err(err) => { - eprintln!("Build error: unable to run build task: {}", err); + let mut description = String::new(); + writeln!( + &mut description, + "Build error: unable to run build task: {}", + err + ) + .unwrap(); if let TaskError::ExitCodeNonZero(out) = err { - eprintln!("--- stdout ---\n{}", String::from_utf8_lossy(&out.stdout)); - eprintln!("--- stderr ---\n{}", String::from_utf8_lossy(&out.stderr)); + writeln!( + &mut description, + "--- stdout ---\n{}", + String::from_utf8_lossy(&out.stdout) + ) + .unwrap(); + writeln!( + &mut description, + "--- stderr ---\n{}", + String::from_utf8_lossy(&out.stderr) + ) + .unwrap(); } - eprintln!("Build task: {:#?}", task); - anyhow::bail!("task execution error") + writeln!(&mut description, "Build task: {:#?}", task).unwrap(); + anyhow::bail!("task execution error: {}", description) } } } @@ -118,14 +135,17 @@ impl<'a> ProblemBuilder<'a> { } /// Builds single solution - async fn build_solution(&self, sol_path: PathBuf) -> anyhow::Result<(String, Command)> { + async fn build_solution(&mut self, sol_path: PathBuf) -> anyhow::Result<(String, Command)> { let sol_id = sol_path .file_stem() .unwrap() .to_str() .context("path is not utf8")? 
.to_owned(); - println!("Building solution {}", &sol_id); + self.tx + .send_event(Update::BuildSolution(sol_id.clone())) + .await?; + let out_path = format!("{}/assets/sol-{}", self.out_dir.display(), &sol_id); Ok(( sol_id, @@ -134,7 +154,7 @@ impl<'a> ProblemBuilder<'a> { } /// Builds all solutions - async fn build_solutions(&self) -> anyhow::Result> { + async fn build_solutions(&mut self) -> anyhow::Result> { let mut out = HashMap::new(); for solution_path in self.glob("solutions/*").await? { let (sol_id, cmd) = self.build_solution(solution_path).await?; @@ -145,17 +165,19 @@ impl<'a> ProblemBuilder<'a> { /// Builds single testgen async fn build_testgen( - &self, + &mut self, testgen_path: &Path, testgen_name: &str, ) -> anyhow::Result { - println!("Building generator {}", testgen_name); + self.tx + .send_event(Update::BuildTestgen(testgen_name.to_string())) + .await?; let out_path = format!("{}/assets/testgen-{}", self.out_dir.display(), testgen_name); self.do_build(testgen_path, &Path::new(&out_path)).await } /// Builds all testgens - async fn build_testgens(&self) -> anyhow::Result> { + async fn build_testgens(&mut self) -> anyhow::Result> { let mut out = HashMap::new(); for testgen in self.glob("generators/*").await? 
{ let testgen_name = testgen @@ -178,17 +200,23 @@ impl<'a> ProblemBuilder<'a> { /// Builds all tests async fn build_tests( - &self, + &mut self, testgens: &HashMap, gen_answers: Option<&Command>, ) -> anyhow::Result> { let tests_path = format!("{}/assets/tests", self.out_dir.display()); std::fs::create_dir_all(&tests_path).expect("couldn't create tests output dir"); - let mut notifier = Notifier::new(self.cfg.tests.len()); + self.tx + .send_event(Update::GenerateTests { + count: self.cfg.tests.len(), + }) + .await?; let mut out = vec![]; for (i, test_spec) in self.cfg.tests.iter().enumerate() { let tid = i + 1; - notifier.maybe_notify(tid); + self.tx + .send_event(Update::GenerateTest { test_id: tid }) + .await?; let out_file_path = format!("{}/{}-in.txt", &tests_path, tid); match &test_spec.gen { @@ -208,13 +236,15 @@ impl<'a> ProblemBuilder<'a> { cmd.env("JJS_TEST_ID", &tid.to_string()); cmd.env("JJS_RANDOM_SEED", &entropy); self.configure_command(&mut cmd); - let gen_out = cmd.run_quiet(); - std::fs::write(&out_file_path, gen_out.stdout).expect("failed to write test"); + let gen_out = cmd.run_quiet().await?; + tokio::fs::write(&out_file_path, gen_out.stdout) + .await + .context("failed to write test")?; } crate::manifest::TestGenSpec::File { path } => { let src_path = self.problem_dir.join("tests").join(path); if let Err(e) = std::fs::copy(&src_path, &out_file_path) { - eprintln!( + anyhow::bail!( "Couldn't copy test data from {} to {}: {}", src_path.display(), out_file_path, @@ -241,7 +271,7 @@ impl<'a> ProblemBuilder<'a> { let mut cmd = cmd.clone(); self.configure_command(&mut cmd); - let mut cmd = cmd.to_std_command(); + let mut cmd = cmd.to_tokio_command(); let mut close_handles = vec![]; unsafe { let test_data_fd = test_data.into_std().await.into_raw_fd(); @@ -268,15 +298,13 @@ impl<'a> ProblemBuilder<'a> { .stdout(crate::Stdio::piped()) .stderr(crate::Stdio::piped()) .output() - .unwrap_or_else(|err| panic!("launch main solution error: {}", err)); + 
.await + .context("launch main solution error: {}")?; if !output.status.success() { - eprintln!( - "solution stderr: {}", - String::from_utf8_lossy(&output.stderr) - ); anyhow::bail!( - "Error while generating correct answer for test {}: main solution failed", - tid + "Error while generating correct answer for test {}: main solution failed: {}", + tid, + String::from_utf8_lossy(&output.stderr) ); } let short_file_path = format!("tests/{}-out.txt", tid); @@ -296,15 +324,16 @@ impl<'a> ProblemBuilder<'a> { } /// Builds all checkers (currently only one is supported) - async fn build_checkers(&self) -> anyhow::Result { + async fn build_checkers(&mut self) -> anyhow::Result { // TODO: support multi-file checkers let checker_path = format!("{}/checkers/main.cpp", self.problem_dir.display()); self.build_checker(&checker_path).await } /// Builds single checker - async fn build_checker(&self, checker_path: &str) -> anyhow::Result { + async fn build_checker(&mut self, checker_path: &str) -> anyhow::Result { let out_path = self.out_dir.join("assets/checker"); + self.tx.send_event(Update::BuildChecker).await?; match &self.cfg.check { crate::manifest::Check::Custom(_) => { self.do_build(Path::new(checker_path), Path::new(&out_path)) @@ -318,11 +347,6 @@ impl<'a> ProblemBuilder<'a> { let src_path = self .jtl_dir .join(format!("bin/builtin-checker-{}", bc.name)); - println!( - "Copying checker binary from {} to {}", - src_path.to_str().unwrap(), - out_path.join("bin").to_str().unwrap() - ); tokio::fs::create_dir(&out_path) .await .context("failed to create out directory")?; @@ -355,18 +379,19 @@ impl<'a> ProblemBuilder<'a> { /// Copies files that should just be copied as is. 
/// Currently, only such file is valuer config - fn copy_raw(&self) -> std::io::Result<()> { + async fn copy_raw(&mut self) -> anyhow::Result<()> { let valuer_cfg_dir = self.out_dir.join("assets/valuer-cfg"); if let Some(valuer_cfg) = &self.cfg.valuer_cfg { - println!("Valuer config"); + self.tx.send_event(Update::CopyValuerConfig).await?; + let src = self.problem_dir.join(valuer_cfg.trim_start_matches('/')); let dest = valuer_cfg_dir.join("cfg.yaml"); - std::fs::create_dir(&valuer_cfg_dir)?; + tokio::fs::create_dir(&valuer_cfg_dir).await?; if src.is_file() { - std::fs::copy(&src, &dest)?; + tokio::fs::copy(&src, &dest).await?; } else { // TODO - eprintln!("Multi-file valuer config is TODO"); + anyhow::bail!("Multi-file valuer config is TODO"); } } Ok(()) @@ -374,7 +399,7 @@ impl<'a> ProblemBuilder<'a> { /// Main method, which actually builds the problem into /// redistributable package. - pub async fn build(&self) -> anyhow::Result<()> { + pub async fn build(&mut self) -> anyhow::Result<()> { self.build_modules().await?; let solutions = self.build_solutions().await?; let testgen_launch_info = self.build_testgens().await?; @@ -411,9 +436,7 @@ impl<'a> ProblemBuilder<'a> { }; self.build_tests(&testgen_launch_info, gen_answers).await? 
}; - if let Err(e) = self.copy_raw() { - eprintln!("Error: {}", e); - } + self.copy_raw().await?; let valuer_exe = { let src = self.jtl_dir.join("bin/jjs-svaluer"); diff --git a/src/pps/server/src/import.rs b/src/pps/server/src/import.rs new file mode 100644 index 00000000..3c12a52e --- /dev/null +++ b/src/pps/server/src/import.rs @@ -0,0 +1,93 @@ +mod problem_importer; +mod template; +mod valuer_cfg; + +use anyhow::{bail, Context as _}; +use pps_api::{ + import_problem::{Request, Update}, + ImportProblem, SimpleFinish, +}; +use problem_importer::Importer; +use std::{collections::HashSet, path::Path}; + +impl rpc::Handler for crate::Service { + type Error = anyhow::Error; + type Fut = futures_util::future::BoxFuture<'static, anyhow::Result<()>>; + + fn handle( + self, + rx: rpc::UnaryRx, + mut tx: rpc::StreamingTx, + ) -> Self::Fut { + Box::pin(async move { + let req = rx.recv().await?; + let result = execute_import_request(req, &mut tx).await; + tx.finish(result.into()).await?; + Ok(()) + }) + } +} + +pub(crate) async fn execute_import_request( + req: Request, + tx: &mut rpc::StreamingTx, +) -> anyhow::Result<()> { + match detect_import_kind(&req.src_path)? 
{ + ImportKind::Problem => (), + ImportKind::Contest => anyhow::bail!("TODO"), + } + import_problem(&req.src_path, &req.out_path, tx).await?; + + Ok(()) +} + +pub(crate) async fn import_problem( + src: &Path, + dest: &Path, + tx: &mut rpc::StreamingTx, +) -> anyhow::Result<()> { + let manifest_path = src.join("problem.xml"); + let manifest = std::fs::read_to_string(manifest_path).context("failed read problem.xml")?; + let doc = roxmltree::Document::parse(&manifest).context("parse error")?; + + let mut importer = Importer { + src: &src, + dest: &dest, + problem_cfg: Default::default(), + known_generators: HashSet::new(), + doc: doc.root_element(), + limits: pom::Limits::default(), + tx, + }; + + importer.run().await?; + + let manifest_path = dest.join("problem.toml"); + let manifest_toml = + toml::Value::try_from(importer.problem_cfg.clone()).context("serialize ppc config")?; + let manifest_data = toml::ser::to_string_pretty(&manifest_toml) + .with_context(|| format!("stringify ppc config: {:#?}", &importer.problem_cfg))?; + std::fs::write(manifest_path, manifest_data).expect("write ppc manifest"); + + Ok(()) +} + +enum ImportKind { + Problem, + Contest, +} + +fn detect_import_kind(path: &Path) -> anyhow::Result { + if !path.exists() { + bail!("path {} does not exists", path.display()); + } + + if path.join("problem.xml").exists() { + return Ok(ImportKind::Problem); + } + if path.join("contest.xml").exists() { + return Ok(ImportKind::Contest); + } + + bail!("unknown src") +} diff --git a/src/ppc/src/import/checker_tpl.cmake b/src/pps/server/src/import/checker_tpl.cmake similarity index 100% rename from src/ppc/src/import/checker_tpl.cmake rename to src/pps/server/src/import/checker_tpl.cmake diff --git a/src/ppc/src/import/contest_import.rs b/src/pps/server/src/import/contest_import.rs similarity index 100% rename from src/ppc/src/import/contest_import.rs rename to src/pps/server/src/import/contest_import.rs diff --git a/src/ppc/src/import/default_valuer_config.yaml 
b/src/pps/server/src/import/default_valuer_config.yaml similarity index 100% rename from src/ppc/src/import/default_valuer_config.yaml rename to src/pps/server/src/import/default_valuer_config.yaml diff --git a/src/ppc/src/import/gen.cmake b/src/pps/server/src/import/gen.cmake similarity index 100% rename from src/ppc/src/import/gen.cmake rename to src/pps/server/src/import/gen.cmake diff --git a/src/ppc/src/import/problem_importer.rs b/src/pps/server/src/import/problem_importer.rs similarity index 60% rename from src/ppc/src/import/problem_importer.rs rename to src/pps/server/src/import/problem_importer.rs index 4c1cb0a8..74a34e2d 100644 --- a/src/ppc/src/import/problem_importer.rs +++ b/src/pps/server/src/import/problem_importer.rs @@ -1,5 +1,9 @@ use anyhow::Context as _; -use std::{collections::HashSet, path::Path}; +use pps_api::{ + import_problem::{PropertyName, Update}, + SimpleFinish, +}; +use std::{collections::HashSet, future::Future, path::Path, pin::Pin}; pub(crate) struct Importer<'a> { pub(crate) src: &'a Path, @@ -8,6 +12,7 @@ pub(crate) struct Importer<'a> { pub(crate) known_generators: HashSet, pub(crate) doc: roxmltree::Node<'a, 'a>, pub(crate) limits: pom::Limits, + pub(crate) tx: &'a mut rpc::StreamingTx, } enum FileCategory { @@ -37,8 +42,13 @@ impl FileCategory { impl<'a> Importer<'a> { // is most important section for us: it contains information // about tests - fn process_judging_section(&mut self, node_judging: roxmltree::Node) -> anyhow::Result<()> { - let node_testset = node_judging.first_element_child().unwrap(); + async fn process_judging_section( + &mut self, + node_judging: roxmltree::Node<'_, '_>, + ) -> anyhow::Result<()> { + let node_testset = node_judging + .first_element_child() + .context(" element does not have a child")?; assert_eq!(node_testset.tag_name().name(), "testset"); let mut test_pattern = None; @@ -51,40 +61,61 @@ impl<'a> Importer<'a> { "time-limit" => { let tl = child .text() - .unwrap() + .context(" does not 
contain value")? .parse::() - .expect("parsing :"); - println!("time limit: {} ms", tl); + .context("parsing failed")?; + self.tx + .send_event(Update::Property { + property_name: PropertyName::TimeLimit, + property_value: tl.to_string(), + }) + .await?; self.limits.time.replace(tl); } "memory-limit" => { let ml = child .text() - .unwrap() + .context(" does not contain value")? .parse::() - .expect("parsing :"); - println!("memory limit: {} bytes ({} MiBs)", ml, ml / (1 << 20)); + .context("parsing failed")?; + self.tx + .send_event(Update::Property { + property_name: PropertyName::MemoryLimit, + property_value: ml.to_string(), + }) + .await?; self.limits.memory.replace(ml); } "input-path-pattern" => { let pat = child.text().unwrap().to_string(); - println!("test input file path pattern: {}", &pat); + self.tx + .send_event(Update::Property { + property_name: PropertyName::InputPathPattern, + property_value: pat.clone(), + }) + .await?; test_pattern.replace(pat); } "answer-path-pattern" => { let pat = child.text().unwrap().to_string(); - println!("test output file path pattern: {}", &pat); + self.tx + .send_event(Update::Property { + property_name: PropertyName::OutputPathPattern, + property_value: pat.clone(), + }) + .await?; ans_pattern.replace(pat); } "test-count" => {} "tests" => { - self.process_tests(child)?; + self.process_tests(child).await?; } _ => { - eprintln!( - "warning: unexpected tag in : {}", + let message = format!( + "unexpected tag in : {}", child.tag_name().name() ); + self.tx.send_event(Update::Warning(message)).await?; } } } @@ -109,17 +140,14 @@ impl<'a> Importer<'a> { .map(drop) } - fn process_file(&mut self, file_path: &str, file_type: &str) -> anyhow::Result<()> { - println!("processing {} of type {}", file_path, file_type); + async fn process_file(&mut self, file_path: &str, file_type: &str) -> anyhow::Result<()> { if !file_path.starts_with("files/") { - eprintln!("file doesn't start from 'files/'."); return Ok(()); } let file_name = 
file_path.trim_start_matches("files/"); let period_pos = match file_name.find('.') { Some(p) => p, None => { - eprintln!("file path does not contain extension"); return Ok(()); } }; @@ -130,24 +158,29 @@ impl<'a> Importer<'a> { if self.known_generators.contains(file_name) { FileCategory::Generator } else { - eprintln!( - "couldn't derive file category (stripped name: {}).", + let message = format!( + "couldn't derive file category (stripped name: {})", file_name ); + self.tx.send_event(Update::Warning(message)).await?; + return Ok(()); } } }; match category { FileCategory::Validator => { - // TODO + let message = "ignoring validators: not yet implemented".to_string(); + self.tx.send_event(Update::Warning(message)).await?; } FileCategory::Checker => { // do nothing here, processed separately } FileCategory::Generator => { let gen_dir = self.dest.join("generators").join(file_name); - std::fs::create_dir(&gen_dir).expect("create generator dir"); + tokio::fs::create_dir(&gen_dir) + .await + .expect("create generator dir"); let extension = match file_type { _ if file_type.starts_with("cpp.g++") => "cpp", "python.3" => "py", @@ -155,20 +188,23 @@ impl<'a> Importer<'a> { }; let dest_path = gen_dir.join(format!("main.{}", extension)); let src_path = self.src.join(file_path); - std::fs::copy(&src_path, &dest_path).with_context(|| { - format!( - "copy generator src from {} to {}", - src_path.display(), - dest_path.display() - ) - })?; + tokio::fs::copy(&src_path, &dest_path) + .await + .with_context(|| { + format!( + "copy generator src from {} to {}", + src_path.display(), + dest_path.display() + ) + })?; if extension == "cpp" { let cmakefile = gen_dir.join("CMakeLists.txt"); // currently, CMakeLists are same with generator let cmakedata = super::template::get_checker_cmakefile(super::template::CheckerOptions {}); - std::fs::write(cmakefile, cmakedata) + tokio::fs::write(cmakefile, cmakedata) + .await .context("write generator's CMakeLists.txt")?; } } @@ -176,8 +212,11 @@ 
impl<'a> Importer<'a> { Ok(()) } - fn process_checker(&mut self, node_checker: roxmltree::Node) -> anyhow::Result<()> { - println!("Importing checker"); + async fn process_checker( + &mut self, + node_checker: roxmltree::Node<'_, '_>, + ) -> anyhow::Result<()> { + self.tx.send_event(Update::ImportChecker).await?; assert_eq!(node_checker.attribute("type"), Some("testlib")); for child in node_checker.children() { if !child.is_element() { @@ -196,22 +235,29 @@ impl<'a> Importer<'a> { Ok(()) } - fn process_executable(&mut self, node_executable: roxmltree::Node) -> anyhow::Result<()> { + async fn process_executable( + &mut self, + node_executable: roxmltree::Node<'_, '_>, + ) -> anyhow::Result<()> { for node_source in node_executable.children() { if node_source.tag_name().name() != "source" { continue; } - let attr_path = node_source.attribute("path").unwrap(); - let attr_type = node_source.attribute("type").unwrap(); - self.process_file(attr_path, attr_type)?; + let attr_path = node_source + .attribute("path") + .context(" does not have path attribute")?; + let attr_type = node_source + .attribute("type") + .context(" does not have type attribute")?; + self.process_file(attr_path, attr_type).await?; } Ok(()) } - fn process_tests(&mut self, tests_node: roxmltree::Node) -> anyhow::Result<()> { - println!("Importing tests"); + async fn process_tests(&mut self, tests_node: roxmltree::Node<'_, '_>) -> anyhow::Result<()> { + self.tx.send_event(Update::ImportTests).await?; assert_eq!(tests_node.tag_name().name(), "tests"); - let mut cnt = 0; + let mut cnt: usize = 0; for test_node in tests_node.children() { if !test_node.is_element() { continue; @@ -247,19 +293,25 @@ impl<'a> Importer<'a> { } self.problem_cfg.tests.push(ts); } - println!("{} tests imported", cnt); + self.tx + .send_event(Update::ImportTestsDone { count: cnt }) + .await?; Ok(()) } - fn process_solutions(&mut self, node: roxmltree::Node) -> anyhow::Result<()> { - println!("Importing solution"); + async fn 
process_solutions(&mut self, node: roxmltree::Node<'_, '_>) -> anyhow::Result<()> { + self.tx.send_event(Update::ImportSolutions).await?; for solution_node in node.children() { if !solution_node.is_element() { continue; } - let tag = solution_node.attribute("tag").unwrap(); + let tag = solution_node + .attribute("tag") + .context("solution does not have attribute")?; if tag == "main" { - println!("importing main solution"); + self.tx + .send_event(Update::ImportSolution(tag.to_string())) + .await?; self.problem_cfg.primary_solution = Some("main".to_string()); let dir = self.dest.join("solutions/main"); let mut src_path = None; @@ -272,7 +324,9 @@ impl<'a> Importer<'a> { } } let src_path = src_path.unwrap(); - std::fs::create_dir_all(&dir).context("create main solution dir")?; + tokio::fs::create_dir_all(&dir) + .await + .context("create main solution dir")?; self.import_file(Path::new(&src_path), Path::new("solutions/main/main.cpp"))?; { let cmake_path = dir.join("CMakeLists.txt"); @@ -281,25 +335,36 @@ impl<'a> Importer<'a> { .context("write CMakeLists.txt for solution")?; } } else { - println!("skipping solution with tag {}: not main", &tag); + let message = format!( + "skipping solution with tag {}: importing non-main solutions not yet implemented", + tag + ); + self.tx.send_event(Update::Warning(message)).await?; } } Ok(()) } - fn process_names(&mut self, node_names: roxmltree::Node) { - println!("Importing name"); + async fn process_names(&mut self, node_names: roxmltree::Node<'_, '_>) -> anyhow::Result<()> { assert!(node_names.is_element()); for child in node_names.children() { if !child.is_element() { continue; } - let title = child.attribute("value").unwrap(); + let title = child + .attribute("value") + .context(" does not have value attribute")?; self.problem_cfg.title = title.to_string(); - println!("problem title: {}", &title); - return; + self.tx + .send_event(Update::Property { + property_name: PropertyName::ProblemTitle, + property_value: 
title.to_string(), + }) + .await?; + break; } + Ok(()) } fn process_problem(&mut self, node_problem: roxmltree::Node) { @@ -345,48 +410,63 @@ impl<'a> Importer<'a> { Ok(()) } - fn go(&mut self, node: roxmltree::Node) -> anyhow::Result<()> { - for child in node.children() { - self.feed(child)?; - } - Ok(()) + fn go<'b>( + &'b mut self, + node: roxmltree::Node<'b, 'b>, + ) -> Pin> + Send + 'b>> { + Box::pin(async move { + for child in node.children() { + self.feed(child).await?; + } + Ok(()) + }) } - fn feed(&mut self, node: roxmltree::Node) -> anyhow::Result<()> { + async fn feed(&mut self, node: roxmltree::Node<'_, '_>) -> anyhow::Result<()> { match node.tag_name().name() { - "names" => self.process_names(node), - "solutions" => self.process_solutions(node)?, - "judging" => self.process_judging_section(node)?, - "executable" => self.process_executable(node)?, - "checker" => self.process_checker(node)?, + "names" => self.process_names(node).await?, + "solutions" => self.process_solutions(node).await?, + "judging" => self.process_judging_section(node).await?, + "executable" => self.process_executable(node).await?, + "checker" => self.process_checker(node).await?, "problem" => { self.process_problem(node); - self.go(node)?; + self.go(node).await?; } _ => { - self.go(node)?; + self.go(node).await?; } } Ok(()) } - fn import_valuer_config(&mut self) -> anyhow::Result<()> { + async fn import_valuer_config(&mut self) -> anyhow::Result<()> { let valuer_cfg_path = self.src.join("files/valuer.cfg"); let config = if valuer_cfg_path.exists() { - println!("Importing valuer.cfg from {}", valuer_cfg_path.display()); - serde_yaml::to_string(&super::valuer_cfg::import(&valuer_cfg_path)?)? 
+ self.tx.send_event(Update::ImportValuerConfig).await?; + let (config, warnings) = super::valuer_cfg::import(&valuer_cfg_path).await?; + for warn in warnings { + self.tx + .send_event(Update::Warning(format!( + "while importing valuer config: {}", + warn + ))) + .await?; + } + serde_yaml::to_string(&config)? } else { + self.tx.send_event(Update::DefaultValuerConfig).await?; include_str!("./default_valuer_config.yaml").to_string() }; - std::fs::write(self.dest.join("valuer.yaml"), config)?; + tokio::fs::write(self.dest.join("valuer.yaml"), config).await?; Ok(()) } - pub(crate) fn run(&mut self) -> anyhow::Result<()> { + pub(crate) async fn run(&mut self) -> anyhow::Result<()> { self.init_dirs()?; self.fill_manifest()?; - self.feed(self.doc)?; - self.import_valuer_config()?; + self.feed(self.doc).await?; + self.import_valuer_config().await?; Ok(()) } } diff --git a/src/ppc/src/import/solution.cmake b/src/pps/server/src/import/solution.cmake similarity index 100% rename from src/ppc/src/import/solution.cmake rename to src/pps/server/src/import/solution.cmake diff --git a/src/ppc/src/import/template.rs b/src/pps/server/src/import/template.rs similarity index 100% rename from src/ppc/src/import/template.rs rename to src/pps/server/src/import/template.rs diff --git a/src/ppc/src/import/valuer_cfg.pest b/src/pps/server/src/import/valuer_cfg.pest similarity index 100% rename from src/ppc/src/import/valuer_cfg.pest rename to src/pps/server/src/import/valuer_cfg.pest diff --git a/src/ppc/src/import/valuer_cfg.rs b/src/pps/server/src/import/valuer_cfg.rs similarity index 88% rename from src/ppc/src/import/valuer_cfg.rs rename to src/pps/server/src/import/valuer_cfg.rs index 9041bbde..197b92d4 100644 --- a/src/ppc/src/import/valuer_cfg.rs +++ b/src/pps/server/src/import/valuer_cfg.rs @@ -19,11 +19,16 @@ pub(super) struct P; struct Visitor<'a> { config: &'a mut svaluer::Config, tests_info: std::collections::HashMap, + warnings: Vec, } impl<'a> Visitor<'a> { - fn 
warn_not_sup(&self, feat: &str) { - eprintln!("not supported feature: {}", feat); + fn warn(&mut self, msg: String) { + self.warnings.push(msg); + } + + fn warn_not_sup(&mut self, feat: &str) { + self.warn(format!("not supported feature: {}", feat)); } fn visit_global_def(&mut self, _node: pest::iterators::Pair<'a, Rule>) { @@ -64,7 +69,7 @@ impl<'a> Visitor<'a> { assert!(num1 <= num2); for tid in num1..=num2 { if self.tests_info.insert(tid, group.name.clone()).is_some() { - eprintln!("test {} is mentioned more than once", tid); + self.warn(format!("test {} is mentioned more than once", tid)); } } } @@ -121,14 +126,18 @@ impl<'a> Visitor<'a> { } } -pub(crate) fn import(path: &Path) -> Result { - let input = std::fs::read_to_string(path)?; +pub(crate) async fn import( + path: &Path, +) -> Result<(svaluer::Config, Vec), ImportValuerCfgError> { + let input = tokio::fs::read_to_string(path).await?; let mut ast = P::parse(Rule::config, &input)?; let mut config = svaluer::Config { groups: Vec::new() }; let mut visitor = Visitor { config: &mut config, tests_info: std::collections::HashMap::new(), + warnings: Vec::new(), }; visitor.visit(ast.next().unwrap()); - Ok(config) + let warnings = std::mem::take(&mut visitor.warnings); + Ok((config, warnings)) } diff --git a/src/pps/server/src/lib.rs b/src/pps/server/src/lib.rs new file mode 100644 index 00000000..22b5ad14 --- /dev/null +++ b/src/pps/server/src/lib.rs @@ -0,0 +1,110 @@ +#![feature(is_sorted)] +#![allow(clippy::needless_lifetimes)] + +mod command; +mod compile; +mod import; +mod manifest; + +use anyhow::Context; +use std::{ + path::{Path, PathBuf}, + process::Stdio, + sync::Arc, +}; + +fn check_dir(path: &Path, allow_nonempty: bool) -> anyhow::Result<()> { + if !path.exists() { + anyhow::bail!("error: path {} not exists", path.display()); + } + if !path.is_dir() { + anyhow::bail!("error: path {} is not directory", path.display()); + } + if !allow_nonempty && path.read_dir().unwrap().next().is_some() { + 
anyhow::bail!("error: dir {} is not empty", path.display()); + } + Ok(()) +} + +#[cfg(target_os = "linux")] +#[tracing::instrument] +fn tune_linux() -> anyhow::Result<()> { + let mut current_limit = libc::rlimit { + rlim_cur: 0, + rlim_max: 0, + }; + unsafe { + if libc::prlimit(0, libc::RLIMIT_STACK, std::ptr::null(), &mut current_limit) != 0 { + anyhow::bail!("get current RLIMIT_STACK"); + } + } + let new_limit = libc::rlimit { + rlim_cur: current_limit.rlim_max, + rlim_max: current_limit.rlim_max, + }; + unsafe { + if libc::prlimit(0, libc::RLIMIT_STACK, &new_limit, std::ptr::null_mut()) != 0 { + anyhow::bail!("update RLIMIT_STACK"); + } + } + + Ok(()) +} + +#[tracing::instrument] +fn tune_resource_limits() -> anyhow::Result<()> { + #[cfg(target_os = "linux")] + tune_linux()?; + + Ok(()) +} + +/// Returns `rpc::Router` with all PPS api routes installed. +pub async fn create_server() -> anyhow::Result { + let mut builder = rpc::RouterBuilder::new(); + + let service = Service(Arc::new(ServiceState::get().await?)); + builder.add_route::(service.clone()); + builder.add_route::(service); + Ok(builder.build()) +} + +/// Starts PPS server on specified port on background tokio task. +#[tracing::instrument(skip(cancel))] +pub async fn serve( + port: u16, + cancel: tokio::sync::CancellationToken, +) -> anyhow::Result> { + tune_resource_limits()?; + + let router = create_server().await?; + + let bind_addr = std::net::SocketAddr::from(([127, 0, 0, 1], port)); + + let server = hyper::Server::try_bind(&bind_addr)? 
+ .serve(router.as_make_service()) + .with_graceful_shutdown(async move { cancel.cancelled().await }); + let (tx, rx) = tokio::sync::oneshot::channel(); + tokio::task::spawn(async move { + server.await.expect("serve error"); + tx.send(()).ok(); + }); + Ok(rx) +} + +#[derive(Clone)] +pub struct Service(pub(crate) Arc); + +pub struct ServiceState { + /// JJS installation directory (used to find JTL binaries) + jjs_dir: PathBuf, +} + +impl ServiceState { + pub async fn get() -> anyhow::Result { + let jjs_dir: PathBuf = std::env::var_os("JJS_PATH") + .context("JJS_PATH not set")? + .into(); + Ok(ServiceState { jjs_dir }) + } +} diff --git a/src/ppc/src/manifest.rs b/src/pps/server/src/manifest.rs similarity index 100% rename from src/ppc/src/manifest.rs rename to src/pps/server/src/manifest.rs diff --git a/src/toolkit/Dockerfile b/src/toolkit/Dockerfile index b704f377..8dd007b5 100644 --- a/src/toolkit/Dockerfile +++ b/src/toolkit/Dockerfile @@ -6,7 +6,7 @@ ENV JJS_AUTH_DATA=/auth/authdata.yaml JJS_PATH=/opt/jjs PATH=/opt/jjs/bin:${PATH CMAKE_PREFIX_PATH=/opt/jjs/share/cmake:${CMAKE_PREFIX_PATH} RUN apt-get update && apt-get install -y libssl-dev COPY /jtl . -COPY /ppc ./bin/jjs-ppc +COPY /pps-cli ./bin/jjs-ppc COPY /cli ./bin/jjs-cli COPY /svaluer ./bin/jjs-svaluer VOLUME ["/auth"]