diff --git a/Cargo.lock b/Cargo.lock index 0a8ce23a06..53adbdee20 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6718,13 +6718,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6971da4d9c3aa03c3d8f3ff0f4155b534aad021292003895a469716b2a230378" dependencies = [ "base64 0.21.7", - "pem", + "pem 1.1.1", "ring 0.16.20", "serde", "serde_json", "simple_asn1", ] +[[package]] +name = "jsonwebtoken" +version = "9.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a87cc7a48537badeae96744432de36f4be2b4a34a05a5ef32e9dd8a1c169dde" +dependencies = [ + "base64 0.22.1", + "js-sys", + "pem 3.0.5", + "ring 0.17.8", + "serde", + "serde_json", + "simple_asn1", +] + [[package]] name = "kqueue" version = "1.0.8" @@ -8636,6 +8651,7 @@ version = "25.5.2" dependencies = [ "ipnet", "pegboard", + "rivet-util", "schemars", "serde", "url", @@ -8676,11 +8692,13 @@ name = "pegboard-manager" version = "25.5.2" dependencies = [ "anyhow", + "base64 0.22.1", "bytes", "futures-util", "hyper 0.14.31", "indoc 2.0.5", "json5", + "jsonwebtoken 9.3.1", "lazy_static", "nix 0.30.1", "notify", @@ -8691,6 +8709,7 @@ dependencies = [ "rand 0.8.5", "rand_chacha 0.3.1", "reqwest 0.12.12", + "ring 0.17.8", "rivet-logs", "rivet-util", "serde", @@ -8761,6 +8780,16 @@ dependencies = [ "base64 0.13.1", ] +[[package]] +name = "pem" +version = "3.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38af38e8470ac9dee3ce1bae1af9c1671fffc44ddfd8bd1d0a3445bf349a8ef3" +dependencies = [ + "base64 0.22.1", + "serde", +] + [[package]] name = "pem-rfc7468" version = "0.7.0" @@ -9973,7 +10002,7 @@ version = "25.5.2" dependencies = [ "base64 0.13.1", "global-error", - "jsonwebtoken", + "jsonwebtoken 8.3.0", "lazy_static", "prost 0.10.4", "rivet-util", @@ -10222,6 +10251,7 @@ dependencies = [ "reqwest 0.11.27", "rivet-config", "rivet-metrics", + "rivet-util", "rustls 0.23.25", "rustls-pemfile 2.2.0", "serde", @@ -10651,6 +10681,7 @@ 
dependencies = [ "rivet-runtime", "rivet-service-manager", "rivet-term", + "rivet-util", "rustyline", "s3-util", "serde", @@ -10800,6 +10831,7 @@ dependencies = [ "async-trait", "bcrypt", "chrono", + "fdb-util", "formatted-error", "futures-util", "global-error", @@ -12838,7 +12870,7 @@ dependencies = [ "chirp-client", "chirp-worker", "chrono", - "jsonwebtoken", + "jsonwebtoken 8.3.0", "lazy_static", "prost 0.10.4", "rivet-claims", diff --git a/examples/system-test-actor/src/managerClient.ts b/examples/system-test-actor/src/managerClient.ts index 71438431e3..352a950895 100644 --- a/examples/system-test-actor/src/managerClient.ts +++ b/examples/system-test-actor/src/managerClient.ts @@ -20,7 +20,7 @@ export function connectToManager() { let message = { init: { - runner_id: process.env.RIVET_RUNNER_ID + access_token: process.env.RIVET_ACCESS_TOKEN } }; let buffer = Buffer.from(JSON.stringify(message)); diff --git a/packages/common/api-helper/macros/src/lib.rs b/packages/common/api-helper/macros/src/lib.rs index d2a1c0d8b8..c067f5c707 100644 --- a/packages/common/api-helper/macros/src/lib.rs +++ b/packages/common/api-helper/macros/src/lib.rs @@ -3,7 +3,7 @@ extern crate proc_macro; use std::iter::FromIterator; use proc_macro::TokenStream; -use proc_macro2::{Literal, TokenStream as TokenStream2, TokenTree}; +use proc_macro2::{Literal, Spacing, TokenStream as TokenStream2, TokenTree}; use proc_macro_error::{emit_warning, proc_macro_error}; use quote::{format_ident, quote, ToTokens}; use syn::{ @@ -454,6 +454,22 @@ impl Parse for Endpoint { while let Some((tt, next)) = rest.token_tree() { match &tt { TokenTree::Punct(punct) if punct.as_char() == ':' => { + // Check for path separator (::) + if punct.spacing() == Spacing::Joint { + if let Some((tt2, next)) = next.token_tree() { + match &tt2 { + TokenTree::Punct(punct) if punct.as_char() == ':' => { + tts.push(tt); + tts.push(tt2); + rest = next; + + continue; + } + _ => {} + } + } + } + return 
Ok((tts.into_iter().collect::(), next)); } _ => { diff --git a/packages/common/chirp-workflow/core/src/db/crdb_nats/debug.rs b/packages/common/chirp-workflow/core/src/db/crdb_nats/debug.rs index d9db5e2539..c1e91f809a 100644 --- a/packages/common/chirp-workflow/core/src/db/crdb_nats/debug.rs +++ b/packages/common/chirp-workflow/core/src/db/crdb_nats/debug.rs @@ -535,6 +535,7 @@ impl DatabaseDebug for DatabaseCrdbNats { tags, NULL AS workflow_id, create_ts, + silence_ts, body, ack_ts FROM db_workflow.tagged_signals diff --git a/packages/common/chirp-workflow/core/src/db/fdb_sqlite_nats/keys/workflow.rs b/packages/common/chirp-workflow/core/src/db/fdb_sqlite_nats/keys/workflow.rs index 0d446989f5..b44d9107f6 100644 --- a/packages/common/chirp-workflow/core/src/db/fdb_sqlite_nats/keys/workflow.rs +++ b/packages/common/chirp-workflow/core/src/db/fdb_sqlite_nats/keys/workflow.rs @@ -926,7 +926,6 @@ impl TuplePack for ByNameAndTagSubspaceKey { &self.k, &self.v, ); - tracing::info!(?t, "---------------------"); t.pack(w, tuple_depth) } } diff --git a/packages/common/chirp-workflow/core/src/db/fdb_sqlite_nats/mod.rs b/packages/common/chirp-workflow/core/src/db/fdb_sqlite_nats/mod.rs index 2600331d54..927826f7bd 100644 --- a/packages/common/chirp-workflow/core/src/db/fdb_sqlite_nats/mod.rs +++ b/packages/common/chirp-workflow/core/src/db/fdb_sqlite_nats/mod.rs @@ -2813,7 +2813,6 @@ impl Database for DatabaseFdbSqliteNats { // TODO: Add config parameter in either fdb or sqlite to toggle this per wf let delete_instead_of_forget = workflow_name == "pegboard_client" || workflow_name == "pegboard_actor"; - // let delete_instead_of_forget = false; if delete_instead_of_forget { sql_execute!( diff --git a/packages/common/config/src/config/server/rivet/mod.rs b/packages/common/config/src/config/server/rivet/mod.rs index 46a72300d7..1a413834f9 100644 --- a/packages/common/config/src/config/server/rivet/mod.rs +++ b/packages/common/config/src/config/server/rivet/mod.rs @@ -850,3 
+850,12 @@ pub struct Edge { #[serde(default)] pub redirect_logs_dir: Option, } + +impl Edge { + /// u16 used in the `Id` type for actors + pub fn datacenter_label(&self) -> u16 { + // Read first 2 bytes + let bytes = self.datacenter_id.as_bytes(); + u16::from_be_bytes([bytes[0], bytes[1]]) + } +} diff --git a/packages/common/fdb-util/src/codes.rs b/packages/common/fdb-util/src/codes.rs new file mode 100644 index 0000000000..3b2e05e5e4 --- /dev/null +++ b/packages/common/fdb-util/src/codes.rs @@ -0,0 +1,4 @@ +// FDB defines a range (0x40-0x4f) of user type codes for use with its tuple encoding system. +// https://github.com/apple/foundationdb/blob/main/design/tuple.md#user-type-codes + +pub const ID: u8 = 0x40; diff --git a/packages/common/fdb-util/src/keys.rs b/packages/common/fdb-util/src/keys.rs index 8db9112ec0..dc9be26364 100644 --- a/packages/common/fdb-util/src/keys.rs +++ b/packages/common/fdb-util/src/keys.rs @@ -51,6 +51,7 @@ pub const RUNNERS_BY_REMAINING_SLOTS: usize = 49; pub const REMAINING_SLOTS: usize = 50; pub const TOTAL_SLOTS: usize = 51; pub const IMAGE_ID: usize = 52; +pub const ACTOR2: usize = 53; // Directories with fdbrs must use string paths instead of tuples pub mod dir { @@ -114,6 +115,7 @@ pub fn key_from_str(key: &str) -> Option { "remaining_slots" => Some(REMAINING_SLOTS), "total_slots" => Some(TOTAL_SLOTS), "image_id" => Some(IMAGE_ID), + "actor2" => Some(ACTOR2), _ => None, } } diff --git a/packages/common/fdb-util/src/lib.rs b/packages/common/fdb-util/src/lib.rs index 385f813253..49f554c785 100644 --- a/packages/common/fdb-util/src/lib.rs +++ b/packages/common/fdb-util/src/lib.rs @@ -11,11 +11,12 @@ use foundationdb::{ self as fdb, future::FdbValue, options::DatabaseOption, - tuple::{self, PackResult, TuplePack, TupleUnpack}, + tuple::{self, PackResult, PackError, TuplePack, TupleUnpack}, KeySelector, RangeOption, }; pub mod keys; +pub mod codes; mod metrics; /// Makes the code blatantly obvious if its using a snapshot read. 
@@ -189,6 +190,39 @@ pub fn end_of_key_range(key: &[u8]) -> Vec { end_key } +// Copied from foundationdb crate +#[inline] +pub fn parse_bytes(input: &[u8], num: usize) -> PackResult<(&[u8], &[u8])> { + if input.len() < num { + Err(PackError::MissingBytes) + } else { + Ok((&input[num..], &input[..num])) + } +} + +// Copied from foundationdb crate +#[inline] +pub fn parse_byte(input: &[u8]) -> PackResult<(&[u8], u8)> { + if input.is_empty() { + Err(PackError::MissingBytes) + } else { + Ok((&input[1..], input[0])) + } +} + +// Copied from foundationdb crate +pub fn parse_code(input: &[u8], expected: u8) -> PackResult<&[u8]> { + let (input, found) = parse_byte(input)?; + if found == expected { + Ok(input) + } else { + Err(PackError::BadCode { + found, + expected: Some(expected), + }) + } +} + pub mod prelude { pub use std::result::Result::Ok; diff --git a/packages/common/server-cli/Cargo.toml b/packages/common/server-cli/Cargo.toml index 3a65391ef1..5e638ab7ae 100644 --- a/packages/common/server-cli/Cargo.toml +++ b/packages/common/server-cli/Cargo.toml @@ -28,6 +28,7 @@ rivet-pools.workspace = true rivet-runtime.workspace = true rivet-service-manager.workspace = true rivet-term.workspace = true +rivet-util.workspace = true rustyline = "15.0.0" s3-util.workspace = true serde = { version = "1.0.210", features = ["derive"] } diff --git a/packages/common/server-cli/src/util/fdb.rs b/packages/common/server-cli/src/util/fdb.rs index 6a56bc0f81..4f34b65127 100644 --- a/packages/common/server-cli/src/util/fdb.rs +++ b/packages/common/server-cli/src/util/fdb.rs @@ -20,6 +20,7 @@ pub enum SimpleTupleValue { I64(i64), F64(f64), Uuid(Uuid), + Id(rivet_util::Id), String(String), Bytes(Vec), Unknown(Vec), @@ -48,6 +49,7 @@ impl fmt::Display for SimpleTupleValue { SimpleTupleValue::I64(v) => write!(f, "{}", style(v).magenta()), SimpleTupleValue::F64(v) => write!(f, "{}", style(v).red()), SimpleTupleValue::Uuid(v) => write!(f, "{}", style(v).blue()), + SimpleTupleValue::Id(v) => 
write!(f, "{}", style(v).blue()), SimpleTupleValue::String(v) => { if v.is_empty() { write!(f, "{}", style("").dim()) @@ -106,6 +108,7 @@ impl TuplePack for SimpleTupleValue { SimpleTupleValue::I64(v) => v.pack(w, tuple_depth), SimpleTupleValue::F64(v) => v.pack(w, tuple_depth), SimpleTupleValue::Uuid(v) => v.pack(w, tuple_depth), + SimpleTupleValue::Id(v) => v.pack(w, tuple_depth), SimpleTupleValue::String(v) => v.pack(w, tuple_depth), SimpleTupleValue::Bytes(v) => v.pack(w, tuple_depth), SimpleTupleValue::Unknown(v) => { @@ -130,6 +133,9 @@ impl<'de> TupleUnpack<'de> for SimpleTupleValue { } else if let Ok((input, v)) = ::unpack(input, tuple_depth) { let v = SimpleTupleValue::Uuid(v); Ok((input, v)) + } else if let Ok((input, v)) = ::unpack(input, tuple_depth) { + let v = SimpleTupleValue::Id(v); + Ok((input, v)) } else if let Ok((input, v)) = ::unpack(input, tuple_depth) { let v = SimpleTupleValue::String(v); Ok((input, v)) @@ -327,6 +333,7 @@ impl From for SimpleValue { SimpleTupleValue::I64(v) => SimpleValue::I64(v), SimpleTupleValue::F64(v) => SimpleValue::F64(v), SimpleTupleValue::Uuid(v) => SimpleValue::Uuid(v), + SimpleTupleValue::Id(v) => SimpleValue::Bytes(v.as_bytes()), SimpleTupleValue::String(v) => SimpleValue::String(v), SimpleTupleValue::Bytes(v) | SimpleTupleValue::Unknown(v) => SimpleValue::Bytes(v), } @@ -356,6 +363,9 @@ impl SimpleTupleSegment { Some("uuid") => Uuid::from_str(value) .map(SimpleTupleValue::Uuid) .with_context(|| format!("Could not parse `{value}` as UUID"))?, + Some("id") => rivet_util::Id::from_str(value) + .map(SimpleTupleValue::Id) + .with_context(|| format!("Could not parse `{value}` as ID"))?, Some("bytes") | Some("b") => { let bytes = hex::decode(value.as_bytes()) .with_context(|| format!("Could not parse `{value}` as hex encoded bytes"))?; diff --git a/packages/common/util/core/Cargo.toml b/packages/common/util/core/Cargo.toml index 758a87877a..bb1e417d89 100644 --- a/packages/common/util/core/Cargo.toml +++ 
b/packages/common/util/core/Cargo.toml @@ -13,6 +13,7 @@ macros = [] async-trait = "0.1" bcrypt = "0.13.0" chrono = "0.4" +fdb-util.workspace = true formatted-error = { workspace = true, optional = true } futures-util = "0.3" global-error.workspace = true diff --git a/packages/common/util/core/src/id.rs new file mode 100644 index 0000000000..9938aab4da --- /dev/null +++ b/packages/common/util/core/src/id.rs @@ -0,0 +1,373 @@ +use std::{fmt, str::FromStr}; + +use fdb_util::prelude::*; +use thiserror::Error; +use uuid::Uuid; + +#[derive(Debug, Error)] +pub enum IdError { + /// Input string length mismatch for a version. + #[error("invalid input length: expected {expected}, got {got}")] + InvalidLength { expected: usize, got: usize }, + + #[error("invalid input length: expected at least 4 characters")] + TooShort, + + /// Encountered a non-base36 character. + #[error("invalid base36 character: '{0}'")] + InvalidChar(char), + + /// Overflow or underflow in byte conversion. + #[error("byte conversion overflow/underflow")] + ByteError, + + /// UUID parse error. + #[error("invalid uuid: {0}")] + InvalidUuid(#[from] uuid::Error), + + /// Unsupported version. + #[error("unsupported version: {0}")] + UnsupportedVersion(u8), +} + +#[derive(Clone, Copy, PartialEq, Eq, Hash)] +pub enum Id { + V0(Uuid), + V1([u8; 18]), +} + +impl Id { + /// Construct a new V0 id with a randomly generated uuid. + pub fn new_v0() -> Self { + Id::V0(Uuid::new_v4()) + } + + pub fn new_v1(label: u16) -> Self { + let mut data = [0u8; 18]; + data[..16].copy_from_slice(Uuid::new_v4().as_bytes()); + data[16..].copy_from_slice(&label.to_be_bytes()); + Id::V1(data) + } + + /// Construct V0 from uuid. + pub fn v0(uuid: Uuid) -> Self { + Id::V0(uuid) + } + + /// Construct V1 from components.
+ pub fn v1(uuid: Uuid, label: u16) -> Self { + let mut data = [0u8; 18]; + data[..16].copy_from_slice(uuid.as_bytes()); + data[16..].copy_from_slice(&label.to_be_bytes()); + Id::V1(data) + } + + pub fn label(&self) -> Option { + match self { + Id::V1(data) => { + let mut b = [0u8; 2]; + b.copy_from_slice(&data[16..]); + Some(u16::from_be_bytes(b)) + } + Id::V0(_) => None, + } + } +} + +impl Id { + pub fn as_v0(&self) -> Option { + match self { + Id::V0(uuid) => Some(*uuid), + _ => None, + } + } + + pub fn parse(s: &str) -> Result { + Self::from_str(s) + } + + /// Convert the ID to its byte representation. + pub fn as_bytes(&self) -> Vec { + match self { + Id::V0(uuid) => uuid.as_bytes().to_vec(), + Id::V1(data) => { + let mut bytes = [0; 19]; + bytes[0] = 1; // Version byte + bytes[1..].copy_from_slice(data); + + bytes.to_vec() + } + } + } + + /// Construct an ID from its byte representation. + pub fn from_bytes(bytes: &[u8]) -> Result { + if bytes.is_empty() { + return Err(IdError::TooShort); + } + + // Check if it's a UUID (16 bytes) + if bytes.len() == 16 { + let mut uuid_bytes = [0u8; 16]; + uuid_bytes.copy_from_slice(bytes); + return Ok(Id::V0(Uuid::from_bytes(uuid_bytes))); + } + + match bytes[0] { + 1 => { + if bytes.len() != 19 { + return Err(IdError::InvalidLength { + expected: 19, + got: bytes.len(), + }); + } + + let mut data = [0u8; 18]; + data.copy_from_slice(&bytes[1..]); + Ok(Id::V1(data)) + } + v => Err(IdError::UnsupportedVersion(v)), + } + } +} + +impl FromStr for Id { + type Err = IdError; + + fn from_str(s: &str) -> Result { + if s.len() < 4 { + return Err(IdError::TooShort); + } + + // V0: UUID string + if s.len() == 36 && s.chars().nth(8) == Some('-') { + return Ok(Id::V0(Uuid::parse_str(s)?)); + } + + let version = base36_mod256( + base36_char_to_base10(s.chars().nth(3).expect("length should be 4"))?, + base36_char_to_base10(s.chars().nth(2).expect("length should be 4"))?, + base36_char_to_base10(s.chars().nth(1).expect("length should be 
4"))?, + base36_char_to_base10(s.chars().nth(0).expect("length should be 4"))?, + ); + + match version { + 1 => { + // v1 uses 19 bytes → 30 chars base36 + let expected_len = 30; + let got = s.len(); + if got != expected_len { + return Err(IdError::InvalidLength { + expected: expected_len, + got, + }); + } + + let buf: [u8; 19] = decode_base36(s)?; + + // slice off version byte + let mut data = [0u8; 18]; + data.copy_from_slice(&buf[1..]); + + Ok(Id::V1(data)) + } + v => Err(IdError::UnsupportedVersion(v)), + } + } +} + +impl fmt::Display for Id { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Id::V0(u) => write!(f, "{}", u.hyphenated()), + Id::V1(data) => { + // pack version + data into 19-byte array + let mut temp = [0u8; 19]; + temp[0] = 1; + temp[1..].copy_from_slice(data); + // encode to 30-char base36 + let mut buf = [b'0'; 30]; + for i in 0..buf.len() { + let mut rem = 0u32; + for byte in temp.iter_mut().rev() { + let v = (rem << 8) | (*byte as u32); + *byte = (v / 36) as u8; + rem = v % 36; + } + buf[i] = if rem < 10 { + b'0' + (rem as u8) + } else { + b'a' + ((rem - 10) as u8) + }; + } + // safe as ASCII + let s = unsafe { String::from_utf8_unchecked(buf.to_vec()) }; + write!(f, "{}", s) + } + } + } +} + +impl fmt::Debug for Id { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{}", self.to_string()) + } +} + +impl serde::Serialize for Id { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +impl<'de> serde::Deserialize<'de> for Id { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + let s = String::deserialize(deserializer)?; + Id::from_str(&s).map_err(serde::de::Error::custom) + } +} + +impl TuplePack for Id { + fn pack( + &self, + w: &mut W, + tuple_depth: TupleDepth, + ) -> std::io::Result { + w.write_all(&[fdb_util::codes::ID])?; + let bytes = self.as_bytes(); + let len = 
u32::try_from(bytes.len()) + .map_err(|err| std::io::Error::new(std::io::ErrorKind::InvalidData, err))?; + w.write_all(&bytes)?; + + Ok(VersionstampOffset::None { size: 1 + len }) + } +} + +impl<'de> TupleUnpack<'de> for Id { + fn unpack(input: &[u8], tuple_depth: TupleDepth) -> PackResult<(&[u8], Self)> { + let input = fdb_util::parse_code(input, fdb_util::codes::ID)?; + let (input, slice) = fdb_util::parse_bytes(input, if input.first() == Some(&1) { 19 } else { 16 })?; // V1 ids are 19 bytes (version byte 0x01 + 18 data bytes); V0 ids are 16 raw uuid bytes — mirrors the Id::from_bytes heuristic + + let v = Id::from_bytes(slice) + .map_err(|err| PackError::Message(format!("bad id format: {err}").into()))?; + + Ok((input, v)) + } +} + +impl From for Id { + fn from(uuid: Uuid) -> Self { + Id::V0(uuid) + } +} + +impl sqlx::Type for Id +where + DB: sqlx::Database, + Vec: sqlx::Type, +{ + fn type_info() -> DB::TypeInfo { + as sqlx::Type>::type_info() + } + + fn compatible(ty: &DB::TypeInfo) -> bool { + as sqlx::Type>::compatible(ty) + } +} + +impl<'q, DB> sqlx::Encode<'q, DB> for Id +where + DB: sqlx::Database, + Vec: sqlx::Encode<'q, DB>, +{ + fn encode_by_ref( + &self, + buf: &mut ::ArgumentBuffer<'q>, + ) -> Result { + self.as_bytes().encode_by_ref(buf) + } +} + +impl<'r, DB> sqlx::Decode<'r, DB> for Id +where + DB: sqlx::Database, + Vec: sqlx::Decode<'r, DB>, +{ + fn decode( + value: ::ValueRef<'r>, + ) -> Result { + let bytes = as sqlx::Decode>::decode(value)?; + Self::from_bytes(&bytes).map_err(|e| Box::new(e) as _) + } +} + +impl sqlx::postgres::PgHasArrayType for Id { + fn array_type_info() -> sqlx::postgres::PgTypeInfo { + sqlx::postgres::PgTypeInfo::with_name("_bytea") + } +} + +/// Decode a base36 string into a fixed-size byte array. +fn decode_base36(s: &str) -> Result<[u8; N], IdError> { + let mut data = [0u8; N]; + for c in s.chars().rev() { + let digit = base36_char_to_base10(c)?
as u32; + + let mut carry = digit; + for i in 0..N { + let v = (data[i] as u32) * 36 + carry; + data[i] = (v & 0xFF) as u8; + carry = v >> 8; + } + if carry != 0 { + return Err(IdError::ByteError); + } + } + Ok(data) +} + +/// Converts a base36 char into a decimal number (not a byte). +fn base36_char_to_base10(c: char) -> Result { + match c { + '0'..='9' => Ok(c as u8 - b'0'), + 'a'..='z' => Ok(c as u8 - b'a' + 10), + _ => return Err(IdError::InvalidChar(c)), + } +} + +/// Converts 4 base36 digits into a byte. +fn base36_mod256(a: u8, b: u8, c: u8, d: u8) -> u8 { + let sum: u32 = (a as u32) * 46_656 + (b as u32) * 1_296 + (c as u32) * 36 + (d as u32) * 1; + (sum % 256) as u8 +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_v0_roundtrip() { + let uuid = Uuid::new_v4(); + let s = uuid.hyphenated().to_string(); + let id = Id::from_str(&s).unwrap(); + assert_eq!(id, Id::V0(uuid)); + assert_eq!(id.to_string(), s); + } + + #[test] + fn test_v1_roundtrip() { + let uuid = Uuid::new_v4(); + let label = 0xABCD; + let id = Id::v1(uuid, label); + let s = id.to_string(); + assert_eq!(s.len(), 30); + let parsed = Id::from_str(&s).unwrap(); + assert_eq!(parsed, id); + } +} diff --git a/packages/common/util/core/src/lib.rs b/packages/common/util/core/src/lib.rs index e51a3ffb6a..7384223479 100644 --- a/packages/common/util/core/src/lib.rs +++ b/packages/common/util/core/src/lib.rs @@ -1,3 +1,4 @@ +pub use id::Id; use rand::Rng; pub use rivet_util_macros as macros; use tokio::time::{Duration, Instant}; @@ -12,6 +13,7 @@ pub mod format; pub mod future; pub mod geo; pub mod glob; +pub mod id; pub mod math; pub mod req; pub mod route; diff --git a/packages/core/api/actor/src/assert.rs b/packages/core/api/actor/src/assert.rs index 0041dfe887..de9d5891cd 100644 --- a/packages/core/api/actor/src/assert.rs +++ b/packages/core/api/actor/src/assert.rs @@ -80,7 +80,7 @@ impl FromRedisValue for ActorValidationData { struct ActorValidationCacheKey { game_id: Uuid,
env_id: Uuid, - actor_id: Uuid, + actor_id: util::Id, } // Implement CacheKey trait for ActorValidationCacheKey @@ -104,19 +104,16 @@ impl CacheKey for ActorValidationCacheKey { /// 4. For actors not in cache: /// a. Retrieves cluster and datacenter information /// b. Filters for valid datacenters with worker/guard pools -/// c. Concurrently validates each actor against multiple datacenters +/// c. Validate each actor against its datacenter /// d. Stores validation results in cache /// 5. Returns only the actor IDs that were successfully validated -/// -/// The validation uses a distributed approach, checking each actor across -/// multiple datacenters until it's found or all datacenters are exhausted. pub async fn actor_for_env( ctx: &Ctx, - actor_ids: &[Uuid], + actor_ids: &[util::Id], game_id: Uuid, env_id: Uuid, _error_code: Option<&'static str>, -) -> GlobalResult> { +) -> GlobalResult> { if actor_ids.is_empty() { return Ok(Vec::new()); } @@ -273,18 +270,29 @@ pub async fn actor_for_env( return Ok(cache); } - // Track validation results for each actor - let validation_results = Arc::new(Mutex::new(HashMap::::new())); + // Track validation results for each actor. This is done instead of collecting the results + // from the stream so that we can skip validation tasks. 
+ let validation_results = Arc::new(Mutex::new(HashMap::::new())); // Create a stream of all datacenter + actor_id combinations let mut validation_tasks = - stream::iter(filtered_datacenters.into_iter().flat_map(|dc| { - let dc_clone = dc.clone(); - let ids = actor_ids_to_validate.clone(); - ids.into_iter() - .map(move |actor_id| (dc_clone.clone(), actor_id)) + stream::iter(actor_ids_to_validate.into_iter().flat_map(|actor_id| { + // If the actor has the datacenter label in its id, use that instead of all dcs + if let Some(label) = actor_id.label() { + filtered_datacenters + .iter() + .find(|dc| dc.label() == label) + .iter() + .map(|dc| (dc.name_id.clone(), actor_id)) + .collect::>() + } else { + filtered_datacenters + .iter() + .map(|dc| (dc.name_id.clone(), actor_id)) + .collect::>() + } })) - .map(|(dc, actor_id)| { + .map(|(dc_name_id, actor_id)| { let validation_results = validation_results.clone(); let game_name_id = game_name_id.clone(); let env_name_id = env_name_id.clone(); @@ -304,7 +312,7 @@ pub async fn actor_for_env( .config() .server()? 
.rivet - .edge_api_url_str(&dc.name_id)?, + .edge_api_url_str(&dc_name_id)?, bearer_access_token: ctx.auth().api_token.clone(), ..Default::default() }; diff --git a/packages/core/api/actor/src/route/actors.rs b/packages/core/api/actor/src/route/actors.rs index f21d3e8080..de38c1b417 100644 --- a/packages/core/api/actor/src/route/actors.rs +++ b/packages/core/api/actor/src/route/actors.rs @@ -33,7 +33,7 @@ pub struct GlobalEndpointTypeQuery { #[tracing::instrument(skip_all)] pub async fn get( ctx: Ctx, - actor_id: Uuid, + actor_id: util::Id, watch_index: WatchIndexQuery, query: GlobalEndpointTypeQuery, ) -> GlobalResult { @@ -42,7 +42,7 @@ pub async fn get( async fn get_inner( ctx: &Ctx, - actor_id: Uuid, + actor_id: util::Id, _watch_index: WatchIndexQuery, query: GlobalEndpointTypeQuery, ) -> GlobalResult { @@ -58,28 +58,35 @@ async fn get_inner( ) .await?; - // Fetch all datacenters - let clusters_res = ctx - .op(cluster::ops::get_for_game::Input { - game_ids: vec![game_id], - }) - .await?; - let cluster_id = unwrap!(clusters_res.games.first()).cluster_id; - let dc_list_res = ctx - .op(cluster::ops::datacenter::list::Input { - cluster_ids: vec![cluster_id], + let dcs = if let Some(label) = actor_id.label() { + ctx.op(cluster::ops::datacenter::get_for_label::Input { + labels: vec![label], }) - .await?; - let cluster = unwrap!(dc_list_res.clusters.into_iter().next()); - let dcs_res = ctx - .op(cluster::ops::datacenter::get::Input { + .await? + .datacenters + } else { + // Fetch all datacenters + let clusters_res = ctx + .op(cluster::ops::get_for_game::Input { + game_ids: vec![game_id], + }) + .await?; + let cluster_id = unwrap!(clusters_res.games.first()).cluster_id; + let dc_list_res = ctx + .op(cluster::ops::datacenter::list::Input { + cluster_ids: vec![cluster_id], + }) + .await?; + let cluster = unwrap!(dc_list_res.clusters.into_iter().next()); + ctx.op(cluster::ops::datacenter::get::Input { datacenter_ids: cluster.datacenter_ids, }) - .await?; + .await? 
+ .datacenters + }; // Filter the datacenters that can be contacted - let filtered_datacenters = dcs_res - .datacenters + let filtered_datacenters = dcs .into_iter() .filter(|dc| crate::utils::filter_edge_dc(ctx.config(), dc).unwrap_or(false)) .collect::>(); @@ -153,7 +160,7 @@ pub async fn get_deprecated( let global = build_global_query_compat(&ctx, game_id, env_id).await?; let get_res = get_inner( &ctx, - actor_id, + actor_id.into(), watch_index, GlobalEndpointTypeQuery { global, @@ -360,7 +367,7 @@ pub struct DeleteQuery { #[tracing::instrument(skip_all)] pub async fn destroy( ctx: Ctx, - actor_id: Uuid, + actor_id: util::Id, query: DeleteQuery, ) -> GlobalResult { let CheckOutput { game_id, .. } = ctx @@ -375,28 +382,35 @@ pub async fn destroy( ) .await?; - // Fetch all datacenters - let clusters_res = ctx - .op(cluster::ops::get_for_game::Input { - game_ids: vec![game_id], + let dcs = if let Some(label) = actor_id.label() { + ctx.op(cluster::ops::datacenter::get_for_label::Input { + labels: vec![label], }) - .await?; - let cluster_id = unwrap!(clusters_res.games.first()).cluster_id; - let dc_list_res = ctx - .op(cluster::ops::datacenter::list::Input { - cluster_ids: vec![cluster_id], - }) - .await?; - let cluster = unwrap!(dc_list_res.clusters.into_iter().next()); - let dcs_res = ctx - .op(cluster::ops::datacenter::get::Input { + .await? + .datacenters + } else { + // Fetch all datacenters + let clusters_res = ctx + .op(cluster::ops::get_for_game::Input { + game_ids: vec![game_id], + }) + .await?; + let cluster_id = unwrap!(clusters_res.games.first()).cluster_id; + let dc_list_res = ctx + .op(cluster::ops::datacenter::list::Input { + cluster_ids: vec![cluster_id], + }) + .await?; + let cluster = unwrap!(dc_list_res.clusters.into_iter().next()); + ctx.op(cluster::ops::datacenter::get::Input { datacenter_ids: cluster.datacenter_ids, }) - .await?; + .await? 
+ .datacenters + }; // Filter the datacenters that can be contacted - let filtered_datacenters = dcs_res - .datacenters + let filtered_datacenters = dcs .into_iter() .filter(|dc| crate::utils::filter_edge_dc(ctx.config(), dc).unwrap_or(false)) .collect::>(); @@ -480,7 +494,7 @@ pub async fn destroy_deprecated( let global = build_global_query_compat(&ctx, game_id, env_id).await?; destroy( ctx, - actor_id, + actor_id.into(), DeleteQuery { global, override_kill_timeout: query.override_kill_timeout, @@ -493,7 +507,7 @@ pub async fn destroy_deprecated( #[tracing::instrument(skip_all)] pub async fn upgrade( ctx: Ctx, - actor_id: Uuid, + actor_id: util::Id, body: models::ActorsUpgradeActorRequest, query: GlobalQuery, ) -> GlobalResult { @@ -509,28 +523,35 @@ pub async fn upgrade( ) .await?; - // Fetch all datacenters - let clusters_res = ctx - .op(cluster::ops::get_for_game::Input { - game_ids: vec![game_id], - }) - .await?; - let cluster_id = unwrap!(clusters_res.games.first()).cluster_id; - let dc_list_res = ctx - .op(cluster::ops::datacenter::list::Input { - cluster_ids: vec![cluster_id], + let dcs = if let Some(label) = actor_id.label() { + ctx.op(cluster::ops::datacenter::get_for_label::Input { + labels: vec![label], }) - .await?; - let cluster = unwrap!(dc_list_res.clusters.into_iter().next()); - let dcs_res = ctx - .op(cluster::ops::datacenter::get::Input { + .await? + .datacenters + } else { + // Fetch all datacenters + let clusters_res = ctx + .op(cluster::ops::get_for_game::Input { + game_ids: vec![game_id], + }) + .await?; + let cluster_id = unwrap!(clusters_res.games.first()).cluster_id; + let dc_list_res = ctx + .op(cluster::ops::datacenter::list::Input { + cluster_ids: vec![cluster_id], + }) + .await?; + let cluster = unwrap!(dc_list_res.clusters.into_iter().next()); + ctx.op(cluster::ops::datacenter::get::Input { datacenter_ids: cluster.datacenter_ids, }) - .await?; + .await? 
+ .datacenters + }; // Filter the datacenters that can be contacted - let filtered_datacenters = dcs_res - .datacenters + let filtered_datacenters = dcs .into_iter() .filter(|dc| crate::utils::filter_edge_dc(ctx.config(), dc).unwrap_or(false)) .collect::>(); @@ -1034,7 +1055,8 @@ fn legacy_convert_actor_to_server( }) .transpose()?, environment: Uuid::nil(), - id: a.id, + // New ids are not supported by old servers + id: util::Id::parse(&a.id)?.as_v0().unwrap_or_else(Uuid::nil), lifecycle: Box::new(models::ServersLifecycle { kill_timeout: a.lifecycle.kill_timeout, }), diff --git a/packages/core/api/actor/src/route/logs.rs b/packages/core/api/actor/src/route/logs.rs index c49fe0866e..9540dda00b 100644 --- a/packages/core/api/actor/src/route/logs.rs +++ b/packages/core/api/actor/src/route/logs.rs @@ -137,15 +137,15 @@ pub async fn get_logs( }; // Build actor_ids map for lookup - let mut actor_id_to_index: std::collections::HashMap = + let mut actor_id_to_index: std::collections::HashMap = std::collections::HashMap::new(); let mut unique_actor_ids: Vec = Vec::new(); // Collect unique actor IDs and map them to indices for entry in &logs_res.entries { if !actor_id_to_index.contains_key(&entry.actor_id) { - actor_id_to_index.insert(entry.actor_id, unique_actor_ids.len() as i32); - unique_actor_ids.push(entry.actor_id.to_string()); + actor_id_to_index.insert(entry.actor_id.clone(), unique_actor_ids.len() as i32); + unique_actor_ids.push(entry.actor_id.clone()); } } diff --git a/packages/core/api/actor/src/route/mod.rs b/packages/core/api/actor/src/route/mod.rs index d794d6e83f..2901c8fd13 100644 --- a/packages/core/api/actor/src/route/mod.rs +++ b/packages/core/api/actor/src/route/mod.rs @@ -107,7 +107,31 @@ define_router! 
{ ), }, - "actors" / Uuid: { + "actors" / "usage": { + GET: actors::usage( + query: actors::UsageQuery, + opt_auth: true, + rate_limit: { + buckets: [ + { count: 100, bucket: duration::minutes(1) }, + ], + }, + ), + }, + + "actors" / "query": { + GET: actors::query( + query: actors::QueryQuery, + opt_auth: true, + rate_limit: { + buckets: [ + { count: 100, bucket: duration::minutes(1) }, + ], + }, + ), + }, + + "actors" / util::Id: { GET: actors::get( query: actors::GlobalEndpointTypeQuery, opt_auth: true, @@ -129,7 +153,7 @@ define_router! { ), }, - "actors" / Uuid / "upgrade": { + "actors" / util::Id / "upgrade": { POST: actors::upgrade( query: GlobalQuery, body: models::ActorsUpgradeActorRequest, diff --git a/packages/core/services/build/src/ops/create.rs b/packages/core/services/build/src/ops/create.rs index f7f02cd76e..74a7360633 100644 --- a/packages/core/services/build/src/ops/create.rs +++ b/packages/core/services/build/src/ops/create.rs @@ -63,14 +63,14 @@ pub async fn get(ctx: &OperationCtx, input: &Input) -> GlobalResult { BUILD_INVALID, reason = "`resources` can only be specified if `allocation_type` = `multi`" ); - }, + } BuildAllocationType::Single => { ensure_with!( input.resources.is_none(), BUILD_INVALID, reason = "builds with `allocation_type` = `single` cannot have `resources`" ); - }, + } BuildAllocationType::Multi => { ensure_with!( input.resources.is_some(), diff --git a/packages/core/services/build/src/types.rs b/packages/core/services/build/src/types.rs index 242203a083..305589146d 100644 --- a/packages/core/services/build/src/types.rs +++ b/packages/core/services/build/src/types.rs @@ -72,10 +72,18 @@ pub struct BuildResources { impl ApiTryFrom for BuildResources { type Error = GlobalError; - + fn api_try_from(value: models::BuildsResources) -> GlobalResult { - ensure_with!(value.cpu >= 0, API_BAD_BODY, reason = "`resources.cpu` must be positive"); - ensure_with!(value.memory >= 0, API_BAD_BODY, reason = "`resources.memory` must be 
positive"); + ensure_with!( + value.cpu >= 0, + API_BAD_BODY, + reason = "`resources.cpu` must be positive" + ); + ensure_with!( + value.memory >= 0, + API_BAD_BODY, + reason = "`resources.memory` must be positive" + ); Ok(BuildResources { cpu_millicores: value.cpu.try_into()?, @@ -86,7 +94,7 @@ impl ApiTryFrom for BuildResources { impl ApiTryFrom for models::BuildsResources { type Error = GlobalError; - + fn api_try_from(value: BuildResources) -> GlobalResult { Ok(models::BuildsResources { cpu: value.cpu_millicores.try_into()?, diff --git a/packages/core/services/cluster/db/cluster/migrations/20250522174043_dc_label.down.sql b/packages/core/services/cluster/db/cluster/migrations/20250522174043_dc_label.down.sql new file mode 100644 index 0000000000..e69de29bb2 diff --git a/packages/core/services/cluster/db/cluster/migrations/20250522174043_dc_label.up.sql b/packages/core/services/cluster/db/cluster/migrations/20250522174043_dc_label.up.sql new file mode 100644 index 0000000000..02aadcc590 --- /dev/null +++ b/packages/core/services/cluster/db/cluster/migrations/20250522174043_dc_label.up.sql @@ -0,0 +1,5 @@ +ALTER TABLE datacenters + ADD COLUMN label BYTES AS (substring(datacenter_id::BYTES FROM 1 FOR 2)) STORED; + +CREATE UNIQUE INDEX datacenter_label_idx +ON datacenters (label); diff --git a/packages/core/services/cluster/src/ops/datacenter/get.rs b/packages/core/services/cluster/src/ops/datacenter/get.rs index 6afb084ef4..16d71b760a 100644 --- a/packages/core/services/cluster/src/ops/datacenter/get.rs +++ b/packages/core/services/cluster/src/ops/datacenter/get.rs @@ -15,7 +15,7 @@ pub struct Output { } #[derive(sqlx::FromRow)] -struct DatacenterRow { +pub(crate) struct DatacenterRow { datacenter_id: Uuid, cluster_id: Uuid, name_id: String, @@ -32,7 +32,7 @@ struct DatacenterRow { } impl DatacenterRow { - fn into_datacenter(self, config: &rivet_config::Config) -> GlobalResult { + pub(crate) fn into_datacenter(self, config: &rivet_config::Config) -> GlobalResult { 
Ok(Datacenter { datacenter_id: self.datacenter_id, cluster_id: self.cluster_id, diff --git a/packages/core/services/cluster/src/ops/datacenter/get_for_label.rs b/packages/core/services/cluster/src/ops/datacenter/get_for_label.rs new file mode 100644 index 0000000000..f3c894275d --- /dev/null +++ b/packages/core/services/cluster/src/ops/datacenter/get_for_label.rs @@ -0,0 +1,68 @@ +use chirp_workflow::prelude::*; + +use crate::types::Datacenter; +use crate::ops::datacenter::get::DatacenterRow; + +#[derive(Debug)] +pub struct Input { + pub labels: Vec, +} + +#[derive(Debug)] +pub struct Output { + pub datacenters: Vec, +} + +#[operation] +pub async fn cluster_datacenter_get_for_label(ctx: &OperationCtx, input: &Input) -> GlobalResult { + let datacenters = ctx + .cache() + .fetch_all_json("cluster.datacenters_get_for_label", input.labels.clone(), { + let ctx = ctx.clone(); + move |mut cache, labels| { + let ctx = ctx.clone(); + async move { + let dcs = get_dcs(ctx, labels).await?; + for dc in dcs { + cache.resolve(&dc.label(), dc); + } + + Ok(cache) + } + } + }) + .await?; + + Ok(Output { datacenters }) +} + +async fn get_dcs(ctx: OperationCtx, labels: Vec) -> GlobalResult> { + let dc_rows = sql_fetch_all!( + [ctx, DatacenterRow] + " + SELECT + datacenter_id, + cluster_id, + name_id, + display_name, + provider, + provider_datacenter_id, + provider_api_token, + pools2, + build_delivery_method, + prebakes_enabled, + create_ts, + guard_public_hostname_dns_parent, + guard_public_hostname_static + FROM db_cluster.datacenters@datacenter_label_idx + WHERE label = ANY($1) + ", + labels.into_iter().map(|x| x as i64).collect::>(), + ) + .await?; + + dc_rows + .into_iter() + .map(|row| row.into_datacenter(ctx.config())) + .collect::>>() +} diff --git a/packages/core/services/cluster/src/ops/datacenter/mod.rs b/packages/core/services/cluster/src/ops/datacenter/mod.rs index 712a6edc5b..1e83973ab2 100644 --- a/packages/core/services/cluster/src/ops/datacenter/mod.rs +++ 
b/packages/core/services/cluster/src/ops/datacenter/mod.rs @@ -6,3 +6,4 @@ pub mod server_discovery; pub mod server_spec_get; pub mod tls_get; pub mod topology_get; +pub mod get_for_label; diff --git a/packages/core/services/cluster/src/types.rs b/packages/core/services/cluster/src/types.rs index 659967d693..7fe0337601 100644 --- a/packages/core/services/cluster/src/types.rs +++ b/packages/core/services/cluster/src/types.rs @@ -34,6 +34,15 @@ pub struct Datacenter { pub guard_public_hostname: GuardPublicHostname, } +impl Datacenter { + /// u16 used in the `Id` type for actors + pub fn label(&self) -> u16 { + // Read first 2 bytes + let bytes = self.datacenter_id.as_bytes(); + u16::from_be_bytes([bytes[0], bytes[1]]) + } +} + #[derive(Serialize, Deserialize, Hash, Debug, Clone, Copy, PartialEq, Eq, FromRepr)] pub enum Provider { /// Servers are manually provisioned and connected. diff --git a/packages/core/services/dynamic-config/src/ops/get_config.rs b/packages/core/services/dynamic-config/src/ops/get_config.rs index c27e206d13..d04fcb48c4 100644 --- a/packages/core/services/dynamic-config/src/ops/get_config.rs +++ b/packages/core/services/dynamic-config/src/ops/get_config.rs @@ -42,7 +42,6 @@ pub async fn get_config(ctx: &OperationCtx, input: &Input) -> GlobalResult, - actor_id: Uuid, + actor_id: util::Id, _game_id: Uuid, env_id: Uuid, endpoint_type: Option, diff --git a/packages/edge/api/actor/src/route/actors.rs b/packages/edge/api/actor/src/route/actors.rs index e4c400abb4..11970f77ba 100644 --- a/packages/edge/api/actor/src/route/actors.rs +++ b/packages/edge/api/actor/src/route/actors.rs @@ -27,7 +27,7 @@ pub struct GlobalEndpointTypeQuery { #[tracing::instrument(skip_all)] pub async fn get( ctx: Ctx, - actor_id: Uuid, + actor_id: util::Id, watch_index: WatchIndexQuery, query: GlobalEndpointTypeQuery, ) -> GlobalResult { @@ -36,7 +36,7 @@ pub async fn get( async fn get_inner( ctx: &Ctx, - actor_id: Uuid, + actor_id: util::Id, _watch_index: WatchIndexQuery, 
query: GlobalEndpointTypeQuery, ) -> GlobalResult { @@ -119,7 +119,6 @@ pub async fn create( error = "`tags` must be `Map`" ); - let actor_id = Uuid::new_v4(); let network = body.network.unwrap_or_default(); let endpoint_type = body .runtime @@ -128,9 +127,10 @@ pub async fn create( .map(|n| n.endpoint_type) .map(ApiInto::api_into); - tracing::info!(?actor_id, ?tags, "creating actor with tags"); + let actor_id = if let build::types::BuildAllocationType::None = build.allocation_type { + let actor_id = Uuid::new_v4(); + tracing::info!(?actor_id, ?tags, "creating actor with tags"); - if let build::types::BuildAllocationType::None = build.allocation_type { let resources = match build.kind { build::types::BuildKind::DockerImage | build::types::BuildKind::OciBundle => { let resources = unwrap_with!( @@ -138,7 +138,7 @@ pub async fn create( API_BAD_BODY, error = "`resources` must be set for actors using Docker builds" ); - + (*resources).api_into() } build::types::BuildKind::JavaScript => { @@ -147,7 +147,7 @@ pub async fn create( API_BAD_BODY, error = "actors using JavaScript builds cannot set `resources`" ); - + pegboard::types::ActorResources::default_isolate() } }; @@ -252,7 +252,12 @@ pub async fn create( bail_with!(ACTOR_FAILED_TO_CREATE, error = "Actor failed before reaching a ready state."); } } + + util::Id::from(actor_id) } else { + let actor_id = util::Id::new_v1(ctx.config().server()?.rivet.edge()?.datacenter_label()); + tracing::info!(?actor_id, ?tags, "creating actor with tags"); + let allocated_fut = if network.wait_ready.unwrap_or_default() { std::future::pending().boxed() } else { @@ -354,7 +359,9 @@ pub async fn create( bail_with!(ACTOR_FAILED_TO_CREATE, error = "Actor failed before reaching a ready state."); } } - } + + actor_id + }; let actors_res = ctx .op(pegboard::ops::actor::get::Input { @@ -389,7 +396,7 @@ pub struct DeleteQuery { #[tracing::instrument(skip_all)] pub async fn destroy( ctx: Ctx, - actor_id: Uuid, + actor_id: util::Id, query: 
DeleteQuery, ) -> GlobalResult { let CheckOutput { game_id, env_id } = ctx @@ -446,7 +453,7 @@ pub async fn destroy( #[tracing::instrument(skip_all)] pub async fn upgrade( ctx: Ctx, - actor_id: Uuid, + actor_id: util::Id, body: models::ActorsUpgradeActorRequest, query: GlobalQuery, ) -> GlobalResult { diff --git a/packages/edge/api/actor/src/route/mod.rs b/packages/edge/api/actor/src/route/mod.rs index ca5f845996..279b7f863f 100644 --- a/packages/edge/api/actor/src/route/mod.rs +++ b/packages/edge/api/actor/src/route/mod.rs @@ -3,7 +3,6 @@ use hyper::{Body, Request, Response}; use rivet_api::models; use rivet_operation::prelude::*; use serde::Deserialize; -use uuid::Uuid; pub mod actors; @@ -79,7 +78,7 @@ define_router! { ), }, - "actors" / Uuid: { + "actors" / util::Id: { GET: actors::get( query: actors::GlobalEndpointTypeQuery, opt_auth: true, @@ -101,7 +100,7 @@ define_router! { ), }, - "actors" / Uuid / "upgrade": { + "actors" / util::Id / "upgrade": { POST: actors::upgrade( query: GlobalQuery, body: models::ActorsUpgradeActorRequest, diff --git a/packages/edge/infra/client/config/Cargo.toml b/packages/edge/infra/client/config/Cargo.toml index 1cf7ca7d12..695c7288d4 100644 --- a/packages/edge/infra/client/config/Cargo.toml +++ b/packages/edge/infra/client/config/Cargo.toml @@ -12,3 +12,4 @@ serde = { version = "1.0.195", features = ["derive"] } url = "2.2.2" uuid = { version = "1.6.1", features = ["v4"] } pegboard.workspace = true +rivet-util.workspace = true diff --git a/packages/edge/infra/client/config/src/runner_protocol.rs b/packages/edge/infra/client/config/src/runner_protocol.rs index 09dd15c017..d11a3f3736 100644 --- a/packages/edge/infra/client/config/src/runner_protocol.rs +++ b/packages/edge/infra/client/config/src/runner_protocol.rs @@ -1,15 +1,15 @@ use pegboard::protocol; use serde::{Deserialize, Serialize}; -use uuid::Uuid; #[derive(Debug, Serialize, Deserialize)] #[serde(rename_all = "snake_case", deny_unknown_fields)] pub enum ToManager { Init 
{ - runner_id: Uuid, + // See `packages/edge/infra/client/manager/src/claims.rs` + access_token: String, }, ActorStateUpdate { - actor_id: Uuid, + actor_id: rivet_util::Id, generation: u32, state: ActorState, }, @@ -19,13 +19,13 @@ pub enum ToManager { #[serde(rename_all = "snake_case", deny_unknown_fields)] pub enum ToRunner { StartActor { - actor_id: Uuid, + actor_id: rivet_util::Id, generation: u32, env: protocol::HashableMap, metadata: protocol::Raw, }, SignalActor { - actor_id: Uuid, + actor_id: rivet_util::Id, generation: u32, signal: i32, persist_storage: bool, diff --git a/packages/edge/infra/client/echo/src/main.rs b/packages/edge/infra/client/echo/src/main.rs index 58475b6260..7f628935cb 100644 --- a/packages/edge/infra/client/echo/src/main.rs +++ b/packages/edge/infra/client/echo/src/main.rs @@ -65,7 +65,7 @@ async fn run_websocket_client(url: &str) -> Result<(), Box, @@ -21,7 +21,7 @@ pub struct Actor { impl Actor { pub fn new( - actor_id: Uuid, + actor_id: rivet_util::Id, generation: u32, config: protocol::ActorConfig, runner: Arc, @@ -37,9 +37,15 @@ impl Actor { pub async fn start(self: &Arc, ctx: &Arc) -> Result<()> { tracing::info!(actor_id=?self.actor_id, generation=?self.generation, "starting"); - // Write actor to DB + let runner_id = self + .config + .runner + .as_ref() + .context("should have runner")? 
+ .runner_id(); let config_json = serde_json::to_vec(&self.config)?; + // Write actor to DB utils::sql::query(|| async { // NOTE: On conflict here in case this query runs but the command is not acknowledged sqlx::query(indoc!( @@ -47,16 +53,18 @@ impl Actor { INSERT INTO actors ( actor_id, generation, + runner_id, config, start_ts, image_id ) - VALUES (?1, ?2, ?3, ?4, ?5) + VALUES (?1, ?2, ?3, ?4, ?5, ?6) ON CONFLICT (actor_id, generation) DO NOTHING ", )) .bind(self.actor_id) .bind(self.generation as i64) + .bind(runner_id) .bind(&config_json) .bind(utils::now()) .bind(self.config.image.id) @@ -185,7 +193,7 @@ impl Actor { res = self.runner.observe(ctx, true) => break res?, res = actor_observer.next() => match res { Some(runner_protocol::ActorState::Running) => { - tracing::info!(actor_id=?self.actor_id, generation=?self.generation, "actor running"); + tracing::info!(actor_id=?self.actor_id, generation=?self.generation, "actor set to running"); let (pid, ports) = tokio::try_join!( self.runner.pid(), diff --git a/packages/edge/infra/client/manager/src/claims.rs b/packages/edge/infra/client/manager/src/claims.rs new file mode 100644 index 0000000000..ee431f983d --- /dev/null +++ b/packages/edge/infra/client/manager/src/claims.rs @@ -0,0 +1,64 @@ +use std::time::Duration; + +use anyhow::*; +use jsonwebtoken as jwt; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +const CURRENT_KID: &str = "v1"; + +#[derive(Debug, Serialize, Deserialize)] +pub struct Claims { + iat: i64, + exp: i64, + jti: Uuid, + entitlements: Vec, +} + +impl Claims { + pub fn new(entitlement: Entitlement, ttl: Duration) -> Self { + let iat = crate::utils::now(); + let exp = iat + ttl.as_millis() as i64; + + Claims { + iat, + exp, + jti: Uuid::new_v4(), + entitlements: vec![entitlement], + } + } + + pub fn encode(&self, secret: &[u8]) -> Result { + let mut header = jwt::Header::new(jwt::Algorithm::HS256); + header.kid = Some(CURRENT_KID.to_string()); + + let token = jwt::encode(&header, 
&self, &jwt::EncodingKey::from_secret(secret))?; + + Ok(token) + } + + pub fn decode(token: &str, secret: &[u8]) -> Result { + let header = jwt::decode_header(token)?; + let kid = header.kid.context("token missing kid")?; + + ensure!(kid == CURRENT_KID, "invalid kid"); + + let token_data = jwt::decode::( + token, + &jwt::DecodingKey::from_secret(secret), + &jwt::Validation::default(), + )?; + + Ok(token_data.claims) + } + + pub fn ent(&self) -> &[Entitlement] { + &self.entitlements + } +} + +#[derive(Debug, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum Entitlement { + Runner { runner_id: Uuid }, +} diff --git a/packages/edge/infra/client/manager/src/ctx.rs b/packages/edge/infra/client/manager/src/ctx.rs index c1790a3f16..43ff3d7b86 100644 --- a/packages/edge/infra/client/manager/src/ctx.rs +++ b/packages/edge/infra/client/manager/src/ctx.rs @@ -37,6 +37,7 @@ use uuid::Uuid; use crate::{ actor::Actor, + claims::{Claims, Entitlement}, event_sender::EventSender, image_download_handler::ImageDownloadHandler, metrics, @@ -72,7 +73,7 @@ pub enum RuntimeError { #[derive(sqlx::FromRow)] struct ActorRow { - actor_id: Uuid, + actor_id: rivet_util::Id, generation: i64, config: Vec, } @@ -88,6 +89,7 @@ struct RunnerRow { pub struct Ctx { config: Config, system: SystemInfo, + secret: Vec, // This requires a RwLock because of the reset functionality which reinitialized the entire database. It // should never be written to besides that. 
@@ -97,19 +99,21 @@ pub struct Ctx { pub(crate) image_download_handler: ImageDownloadHandler, pub(crate) runners: RwLock>>, - pub(crate) actors: RwLock>>, + pub(crate) actors: RwLock>>, } impl Ctx { pub fn new( config: Config, system: SystemInfo, + secret: Vec, pool: SqlitePool, tx: SplitSink>, Message>, ) -> Arc { Arc::new(Ctx { config, system, + secret, pool: RwLock::new(pool), tx: Mutex::new(tx), @@ -788,7 +792,7 @@ impl Ctx { .await?; let actor_rows = utils::sql::query(|| async { - sqlx::query_as::<_, (Uuid, i64, Option)>(indoc!( + sqlx::query_as::<_, (rivet_util::Id, i64, Option)>(indoc!( " UPDATE actors SET stop_ts = ?2 @@ -989,6 +993,10 @@ impl Ctx { &self.config.client } + pub fn secret(&self) -> &[u8] { + &self.secret + } + pub fn runners_path(&self) -> PathBuf { self.config().data_dir().join("runners") } @@ -1013,7 +1021,7 @@ impl Ctx { // Test bindings #[cfg(feature = "test")] impl Ctx { - pub fn actors(&self) -> &RwLock>> { + pub fn actors(&self) -> &RwLock>> { &self.actors } } diff --git a/packages/edge/infra/client/manager/src/lib.rs b/packages/edge/infra/client/manager/src/lib.rs index 5239d4c7ad..8d78e859e7 100644 --- a/packages/edge/infra/client/manager/src/lib.rs +++ b/packages/edge/infra/client/manager/src/lib.rs @@ -3,6 +3,8 @@ #[cfg(feature = "test")] mod actor; #[cfg(feature = "test")] +mod claims; +#[cfg(feature = "test")] mod ctx; #[cfg(feature = "test")] pub mod event_sender; diff --git a/packages/edge/infra/client/manager/src/main.rs b/packages/edge/infra/client/manager/src/main.rs index 42cf7d37e1..2ced8a8e78 100644 --- a/packages/edge/infra/client/manager/src/main.rs +++ b/packages/edge/infra/client/manager/src/main.rs @@ -20,6 +20,7 @@ use tracing_subscriber::{prelude::*, EnvFilter}; use url::Url; mod actor; +mod claims; mod ctx; mod event_sender; mod image_download_handler; @@ -35,6 +36,7 @@ const PROTOCOL_VERSION: u16 = 2; struct Init { config: Config, system: SystemInfo, + secret: Vec, pool: SqlitePool, } @@ -148,12 +150,15 @@ async 
fn init() -> Result { // Init project directories utils::init_dir(&config).await?; + let secret = utils::load_secret(&config).await?; + // Init sqlite db let pool = utils::init_sqlite_db(&config).await?; Ok(Init { config, system, + secret, pool, }) } @@ -187,7 +192,7 @@ async fn run(init: Init, first: bool) -> Result<()> { tracing::info!("connected to pegboard ws"); - let ctx = Ctx::new(init.config, init.system, init.pool, tx); + let ctx = Ctx::new(init.config, init.system, init.secret, init.pool, tx); tokio::try_join!( async { metrics_thread.await?.map_err(Into::into) }, diff --git a/packages/edge/infra/client/manager/src/runner/mod.rs b/packages/edge/infra/client/manager/src/runner/mod.rs index eefff36c80..074077662a 100644 --- a/packages/edge/infra/client/manager/src/runner/mod.rs +++ b/packages/edge/infra/client/manager/src/runner/mod.rs @@ -43,8 +43,8 @@ pub(crate) mod setup; const PID_POLL_INTERVAL: Duration = Duration::from_millis(1000); /// How long before killing a runner with a socket if it has not pinged. const PING_TIMEOUT: Duration = Duration::from_secs(5); -/// How long to wait when waiting for the socket to become ready before timing out. -const SOCKET_READY_TIMEOUT: Duration = Duration::from_secs(3); +/// How long to wait for the runner socket to become ready before timing out. +const SOCKET_READY_TIMEOUT: Duration = Duration::from_secs(5); /// How long to wait when getting the PID before timing out. const GET_PID_TIMEOUT: Duration = Duration::from_secs(256); // IMPORTANT: This cannot be just `rivet-` because this is used as a prefix to filter cgroup names @@ -80,7 +80,7 @@ pub struct Runner { /// Used instead of polling loops for faster updates. 
bump_channel: broadcast::Sender<()>, - actor_observer_tx: broadcast::Sender<(Uuid, u32, runner_protocol::ActorState)>, + actor_observer_tx: broadcast::Sender<(rivet_util::Id, u32, runner_protocol::ActorState)>, } impl Runner { @@ -117,6 +117,7 @@ impl Runner { pub async fn attach_socket( self: &Arc, + ctx: &Arc, ws_stream: WebSocketStream, ) -> Result<()> { match &self.comms { @@ -150,8 +151,9 @@ impl Runner { // Spawn a new thread to handle incoming messages let self2 = self.clone(); + let ctx2 = ctx.clone(); tokio::task::spawn(async move { - if let Err(err) = self2.receive_messages(ws_rx).await { + if let Err(err) = self2.receive_messages(&ctx2, ws_rx).await { tracing::error!(runner_id=?self2.runner_id, ?err, "socket error, killing runner"); if let Err(err) = self2.signal(Signal::SIGKILL).await { @@ -167,6 +169,7 @@ impl Runner { async fn receive_messages( &self, + ctx: &Ctx, mut ws_rx: SplitStream>, ) -> Result<()> { loop { @@ -182,17 +185,17 @@ impl Runner { Some(Ok(Message::Binary(buf))) => { let packet = serde_json::from_slice::(&buf)?; - self.process_packet(packet).await?; + self.process_packet(ctx, packet).await?; } Some(Ok(packet)) => bail!("runner socket unexpected packet: {packet:?}"), Some(Err(err)) => break Err(err).context("runner socket error"), }, - Err(_) => bail!("socket timed out"), + Err(_) => bail!("socket ping timed out"), } } } - async fn process_packet(&self, packet: runner_protocol::ToManager) -> Result<()> { + async fn process_packet(&self, ctx: &Ctx, packet: runner_protocol::ToManager) -> Result<()> { tracing::debug!(?packet, "runner received packet"); match packet { @@ -202,6 +205,8 @@ impl Runner { generation, state, } => { + // NOTE: We don't have to verify if the actor id given here is valid because only valid actors + // are listening to this runner's `actor_observer_tx`. This means invalid messages are ignored. 
// NOTE: No receivers is not an error let _ = self.actor_observer_tx.send((actor_id, generation, state)); } @@ -259,7 +264,7 @@ impl Runner { pub async fn start( self: &Arc, ctx: &Arc, - actor_id: Option, + actor_id: Option, ) -> Result> { tracing::info!(runner_id=?self.runner_id, "starting"); @@ -458,7 +463,7 @@ impl Runner { Ok(exit_code) } - pub fn new_actor_observer(&self, actor_id: Uuid, generation: u32) -> ActorObserver { + pub fn new_actor_observer(&self, actor_id: rivet_util::Id, generation: u32) -> ActorObserver { ActorObserver::new(actor_id, generation, self.actor_observer_tx.subscribe()) } @@ -645,16 +650,16 @@ impl Comms { } pub struct ActorObserver { - actor_id: Uuid, + actor_id: rivet_util::Id, generation: u32, - sub: broadcast::Receiver<(Uuid, u32, runner_protocol::ActorState)>, + sub: broadcast::Receiver<(rivet_util::Id, u32, runner_protocol::ActorState)>, } impl ActorObserver { fn new( - actor_id: Uuid, + actor_id: rivet_util::Id, generation: u32, - sub: broadcast::Receiver<(Uuid, u32, runner_protocol::ActorState)>, + sub: broadcast::Receiver<(rivet_util::Id, u32, runner_protocol::ActorState)>, ) -> Self { ActorObserver { actor_id, diff --git a/packages/edge/infra/client/manager/src/runner/setup.rs b/packages/edge/infra/client/manager/src/runner/setup.rs index b453b09c54..1213dca07d 100644 --- a/packages/edge/infra/client/manager/src/runner/setup.rs +++ b/packages/edge/infra/client/manager/src/runner/setup.rs @@ -5,7 +5,7 @@ use std::{ process::Stdio, result::Result::{Err, Ok}, sync::Arc, - time::Instant, + time::{Duration, Instant}, }; use anyhow::*; @@ -26,7 +26,14 @@ use tokio::{ use uuid::Uuid; use super::{oci_config, Runner}; -use crate::{ctx::Ctx, utils}; +use crate::{ + claims::{Claims, Entitlement}, + ctx::Ctx, + utils, +}; + +/// How long an access token given to a runner lasts. 1 Year. 
+const ACCESS_TOKEN_TTL: Duration = Duration::from_secs(60 * 60 * 24 * 365); #[derive(Hash, Debug, Clone, Copy, PartialEq, Eq, FromRepr)] pub enum Comms { @@ -254,12 +261,19 @@ impl Runner { runner_id=?self.runner_id, "building environment variables", ); + let access_token = Claims::new( + Entitlement::Runner { + runner_id: self.runner_id, + }, + ACCESS_TOKEN_TTL, + ) + .encode(ctx.secret())?; let env = user_config .process .env .into_iter() .chain( - self.build_default_env(ctx, &ports) + self.build_default_env(ctx, &ports, &access_token) .into_iter() .map(|(k, v)| format!("{k}={v}")), ) @@ -785,6 +799,7 @@ impl Runner { &self, ctx: &Ctx, ports: &protocol::HashableMap, + access_token: &str, ) -> HashMap { self.config .env @@ -810,8 +825,7 @@ impl Runner { "RIVET_API_ENDPOINT".to_string(), ctx.config().cluster.api_endpoint.to_string(), ), - // TODO: Replace with auth token - ("RIVET_RUNNER_ID".to_string(), self.runner_id.to_string()), + ("RIVET_ACCESS_TOKEN".to_string(), access_token.to_string()), ]) .collect() } diff --git a/packages/edge/infra/client/manager/src/utils/mod.rs b/packages/edge/infra/client/manager/src/utils/mod.rs index 35456c91da..1aaa71f403 100644 --- a/packages/edge/infra/client/manager/src/utils/mod.rs +++ b/packages/edge/infra/client/manager/src/utils/mod.rs @@ -5,12 +5,14 @@ use std::{ }; use anyhow::*; +use base64::{engine::general_purpose, Engine}; use indoc::indoc; use notify::{ event::{AccessKind, AccessMode}, Event, EventKind, RecommendedWatcher, RecursiveMode, Watcher, }; use pegboard_config::Config; +use ring::rand::{SecureRandom, SystemRandom}; use sql::SqlitePoolExt; use sqlx::{ migrate::MigrateDatabase, @@ -22,6 +24,7 @@ use sqlx::{ }; use tokio::{ fs, + io::AsyncWriteExt, sync::mpsc::{channel, Receiver}, }; @@ -92,6 +95,32 @@ pub async fn init_sqlite_db(config: &Config) -> Result { Ok(pool) } +pub async fn load_secret(config: &Config) -> Result> { + let secret_path = config.client.data_dir().join("secret.key"); + + // If the file 
doesn't exist, generate and persist it + if fs::metadata(&secret_path).await.is_err() { + // Generate new key + let rng = SystemRandom::new(); + let mut key = [0u8; 32]; + rng.fill(&mut key)?; + let b64 = general_purpose::STANDARD.encode(&key); + + let mut file = fs::File::create(&secret_path).await?; + file.write_all(b64.as_bytes()).await?; + file.flush().await?; + + Ok(key.into()) + } else { + let b64 = fs::read_to_string(&secret_path).await?; + let key = general_purpose::STANDARD.decode(b64.trim())?; + + ensure!(key.len() == 32, "Invalid key length"); + + Ok(key) + } +} + async fn build_sqlite_pool(db_url: &str) -> Result { let opts = db_url .parse::()? @@ -208,10 +237,12 @@ async fn init_sqlite_schema(pool: &SqlitePool) -> Result<()> { sqlx::query(indoc!( " CREATE TABLE IF NOT EXISTS actors ( - actor_id BLOB NOT NULL, -- UUID + actor_id BLOB NOT NULL, -- rivet_util::Id generation INTEGER NOT NULL, config BLOB NOT NULL, -- JSONB + runner_id NOT NULL, -- Already exists in `config`, set here for ease of querying + start_ts INTEGER NOT NULL, running_ts INTEGER, stop_ts INTEGER, @@ -257,7 +288,7 @@ async fn init_sqlite_schema(pool: &SqlitePool) -> Result<()> { sqlx::query(indoc!( " - CREATE INDEX IF NOT EXISTS runner_ports_id_idx + CREATE INDEX IF NOT EXISTS runner_ports_runner_id_idx ON runner_ports(runner_id) ", )) diff --git a/packages/edge/infra/client/manager/tests/client_rebuild_state.rs b/packages/edge/infra/client/manager/tests/client_rebuild_state.rs index 76de5502f2..8ecd443743 100644 --- a/packages/edge/infra/client/manager/tests/client_rebuild_state.rs +++ b/packages/edge/infra/client/manager/tests/client_rebuild_state.rs @@ -32,7 +32,7 @@ async fn client_rebuild_state() { let (close_tx, mut close_rx) = tokio::sync::watch::channel(()); let close_tx = Arc::new(close_tx); - let actor_id = Uuid::new_v4(); + let actor_id = rivet_util::Id::new_v1(0); let first_client = Arc::new(AtomicBool::new(true)); let port = portpicker::pick_unused_port().expect("no free 
ports"); @@ -69,7 +69,7 @@ async fn handle_connection( ctx_wrapper: Arc>>>, close_tx: Arc>, raw_stream: TcpStream, - actor_id: Uuid, + actor_id: rivet_util::Id, first_client: Arc, ) { tokio::spawn(async move { diff --git a/packages/edge/infra/client/manager/tests/client_state_external_kill.rs b/packages/edge/infra/client/manager/tests/client_state_external_kill.rs index 12e27c3155..f49b2df864 100644 --- a/packages/edge/infra/client/manager/tests/client_state_external_kill.rs +++ b/packages/edge/infra/client/manager/tests/client_state_external_kill.rs @@ -35,7 +35,7 @@ async fn client_state_external_kill() { let (close_tx, mut close_rx) = tokio::sync::watch::channel(()); let close_tx = Arc::new(close_tx); - let actor_id = Uuid::new_v4(); + let actor_id = rivet_util::Id::new_v1(0); let first_client = Arc::new(AtomicBool::new(true)); let actor_pid = Arc::new(AtomicI32::new(0)); @@ -84,7 +84,7 @@ async fn handle_connection( ctx_wrapper: Arc>>>, close_tx: Arc>, raw_stream: TcpStream, - actor_id: Uuid, + actor_id: rivet_util::Id, first_client: Arc, actor_pid: Arc, ) { diff --git a/packages/edge/infra/client/manager/tests/common.rs b/packages/edge/infra/client/manager/tests/common.rs index 572bbec314..16f642052c 100644 --- a/packages/edge/infra/client/manager/tests/common.rs +++ b/packages/edge/infra/client/manager/tests/common.rs @@ -67,7 +67,7 @@ pub async fn send_init_packet(tx: &mut SplitSink, Message>, - actor_id: Uuid, + actor_id: rivet_util::Id, ) { let cmd = protocol::Command::StartActor { actor_id, @@ -278,6 +278,8 @@ pub async fn start_client( ) { let system = system_info::fetch().await.unwrap(); + let secret = utils::load_secret(&config).await.unwrap(); + let pool = utils::init_sqlite_db(&config).await.unwrap(); // Build WS connection URL @@ -298,7 +300,7 @@ pub async fn start_client( tracing::info!("connected"); - let ctx = Ctx::new(config, system, pool, tx); + let ctx = Ctx::new(config, system, secret, pool, tx); // Share reference { diff --git 
a/packages/edge/infra/client/manager/tests/container_external_kill.rs b/packages/edge/infra/client/manager/tests/container_external_kill.rs index ed2babd53b..cc48f4da2d 100644 --- a/packages/edge/infra/client/manager/tests/container_external_kill.rs +++ b/packages/edge/infra/client/manager/tests/container_external_kill.rs @@ -57,7 +57,7 @@ async fn handle_connection( guard.clone().unwrap() }; - let actor_id = Uuid::new_v4(); + let actor_id = rivet_util::Id::new_v1(0); // Receive messages from socket while let Some(msg) = rx.next().await { diff --git a/packages/edge/infra/client/manager/tests/container_lifecycle.rs b/packages/edge/infra/client/manager/tests/container_lifecycle.rs index cf94a6760d..7036c3a33e 100644 --- a/packages/edge/infra/client/manager/tests/container_lifecycle.rs +++ b/packages/edge/infra/client/manager/tests/container_lifecycle.rs @@ -68,7 +68,7 @@ async fn handle_connection( guard.clone().unwrap() }; - let actor_id = Uuid::new_v4(); + let actor_id = rivet_util::Id::new_v1(0); let mut actor_state = State::None; // Receive messages from socket diff --git a/packages/edge/infra/guard/core/Cargo.toml b/packages/edge/infra/guard/core/Cargo.toml index 3b4b2ad18d..90737f4631 100644 --- a/packages/edge/infra/guard/core/Cargo.toml +++ b/packages/edge/infra/guard/core/Cargo.toml @@ -29,6 +29,7 @@ uuid = { version = "1.16.0", features = ["v4", "serde"] } rivet-metrics.workspace = true prometheus = "0.13.3" rivet-config.workspace = true +rivet-util.workspace = true rand = "0.8.5" cluster.workspace = true moka = { version = "0.12", features = ["future"] } diff --git a/packages/edge/infra/guard/core/src/proxy_service.rs b/packages/edge/infra/guard/core/src/proxy_service.rs index e381dd1b91..bdf3031426 100644 --- a/packages/edge/infra/guard/core/src/proxy_service.rs +++ b/packages/edge/infra/guard/core/src/proxy_service.rs @@ -99,7 +99,7 @@ impl http_body::Body for ResponseBody { // Routing types #[derive(Clone, Debug)] pub struct RouteTarget { - pub 
actor_id: Option, + pub actor_id: Option, pub server_id: Option, pub host: String, pub port: u16, @@ -213,7 +213,9 @@ pub enum MiddlewareResponse { } pub type MiddlewareFn = Arc< - dyn for<'a> Fn(&'a Uuid) -> futures::future::BoxFuture<'a, GlobalResult> + dyn for<'a> Fn( + &'a rivet_util::Id, + ) -> futures::future::BoxFuture<'a, GlobalResult> + Send + Sync, >; @@ -476,7 +478,10 @@ impl ProxyState { } #[tracing::instrument(skip_all)] - async fn get_middleware_config(&self, actor_id: &Uuid) -> GlobalResult { + async fn get_middleware_config( + &self, + actor_id: &rivet_util::Id, + ) -> GlobalResult { // Call the middleware function with a timeout let default_timeout = Duration::from_secs(5); // Default 5 seconds diff --git a/packages/edge/infra/guard/core/src/util.rs b/packages/edge/infra/guard/core/src/util.rs index a2108c9ebd..dc00c3d2e8 100644 --- a/packages/edge/infra/guard/core/src/util.rs +++ b/packages/edge/infra/guard/core/src/util.rs @@ -1,12 +1,11 @@ use global_error::prelude::*; -use uuid::Uuid; use crate::types::{EndpointType, GameGuardProtocol}; /// Build a hostname or path string for the given endpoint type pub fn build_actor_hostname_and_path( endpoint_type: EndpointType, - actor_id: &Uuid, + actor_id: &rivet_util::Id, port_name: &str, guard_hostname: &str, _protocol: GameGuardProtocol, diff --git a/packages/edge/infra/guard/server/src/main.rs b/packages/edge/infra/guard/server/src/main.rs index 0441c8495c..f08a17254f 100644 --- a/packages/edge/infra/guard/server/src/main.rs +++ b/packages/edge/infra/guard/server/src/main.rs @@ -111,13 +111,11 @@ async fn main_inner() -> GlobalResult<()> { fn create_middleware_function( ctx: StandaloneCtx, ) -> Arc< - dyn for<'a> Fn( - &'a uuid::Uuid, - ) -> futures::future::BoxFuture<'a, GlobalResult> + dyn for<'a> Fn(&'a util::Id) -> futures::future::BoxFuture<'a, GlobalResult> + Send + Sync, > { - Arc::new(move |_actor_id: &uuid::Uuid| { + Arc::new(move |_actor_id: &util::Id| { let _ctx = ctx.clone(); 
Box::pin(async move { diff --git a/packages/edge/infra/guard/server/src/routing/actor.rs b/packages/edge/infra/guard/server/src/routing/actor.rs index 123db49212..2646cbd77e 100644 --- a/packages/edge/infra/guard/server/src/routing/actor.rs +++ b/packages/edge/infra/guard/server/src/routing/actor.rs @@ -100,7 +100,7 @@ async fn try_route_with_endpoint_type( // For hostname-based routing, extract from hostname if let Some(captures) = hostname_regex.captures(hostname) { match (captures.name("actor_id"), captures.name("port_name")) { - (Some(actor_id), Some(port_name)) => match Uuid::parse_str(actor_id.as_str()) { + (Some(actor_id), Some(port_name)) => match util::Id::parse(actor_id.as_str()) { Ok(actor_id) => (actor_id, port_name.as_str().to_string()), Err(_) => return Ok(None), }, @@ -127,7 +127,7 @@ async fn try_route_with_endpoint_type( if let Some(captures) = path_regex.captures(path) { match (captures.name("actor_id"), captures.name("port_name")) { - (Some(actor_id), Some(port_name)) => match Uuid::parse_str(actor_id.as_str()) { + (Some(actor_id), Some(port_name)) => match util::Id::parse(actor_id.as_str()) { Ok(actor_id) => (actor_id, port_name.as_str().to_string()), Err(_) => return Ok(None), }, @@ -173,7 +173,7 @@ async fn try_route_with_endpoint_type( #[tracing::instrument(skip_all, fields(?actor_id, %port_name, %path_to_forward))] async fn find_actor( ctx: &StandaloneCtx, - actor_id: &Uuid, + actor_id: &util::Id, port_name: &str, path_to_forward: String, ) -> GlobalResult> { @@ -182,11 +182,17 @@ async fn find_actor( ctx.fdb() .await? .run(|tx, _mc| async move { - let create_ts_key = pegboard::keys::actor::CreateTsKey::new(*actor_id); - let exists = tx - .get(&pegboard::keys::subspace().pack(&create_ts_key), SNAPSHOT) - .await? - .is_some(); + let exists = if let Some(actor_id) = actor_id.as_v0() { + let create_ts_key = pegboard::keys::actor::CreateTsKey::new(actor_id); + tx.get(&pegboard::keys::subspace().pack(&create_ts_key), SNAPSHOT) + .await? 
+ .is_some() + } else { + let create_ts_key = pegboard::keys::actor2::CreateTsKey::new(*actor_id); + tx.get(&pegboard::keys::subspace().pack(&create_ts_key), SNAPSHOT) + .await? + .is_some() + }; Ok(exists) }) @@ -266,27 +272,51 @@ async fn find_actor( #[tracing::instrument(skip_all, fields(?actor_id))] async fn fetch_proxied_ports( ctx: &StandaloneCtx, - actor_id: &Uuid, -) -> GlobalResult>> { + actor_id: &util::Id, +) -> GlobalResult>> { // Fetch ports ctx.fdb() .await? .run(|tx, _mc| async move { - let proxied_ports_key = pegboard::keys::actor::ProxiedPortsKey::new(*actor_id); - let raw = tx - .get( - &pegboard::keys::subspace().pack(&proxied_ports_key), - // NOTE: This is not SERIALIZABLE because we don't want to conflict with port updates - // and its not important if its slightly stale - SNAPSHOT, - ) - .await?; - if let Some(raw) = raw { - Ok(Some(proxied_ports_key.deserialize(&raw).map_err(|x| { - fdb::FdbBindingError::CustomError(x.into()) - })?)) + if let Some(actor_id) = actor_id.as_v0() { + let proxied_ports_key = pegboard::keys::actor::ProxiedPortsKey::new(actor_id); + let raw = tx + .get( + &pegboard::keys::subspace().pack(&proxied_ports_key), + // NOTE: This is not SERIALIZABLE because we don't want to conflict with port updates + // and its not important if its slightly stale + SNAPSHOT, + ) + .await?; + if let Some(raw) = raw { + Ok(Some( + proxied_ports_key + .deserialize(&raw) + .map_err(|x| fdb::FdbBindingError::CustomError(x.into()))? 
+ .into_iter() + .map(Into::into) + .collect::>(), + )) + } else { + Ok(None) + } } else { - Ok(None) + let proxied_ports_key = pegboard::keys::actor2::ProxiedPortsKey::new(*actor_id); + let raw = tx + .get( + &pegboard::keys::subspace().pack(&proxied_ports_key), + // NOTE: This is not SERIALIZABLE because we don't want to conflict with port updates + // and its not important if its slightly stale + SNAPSHOT, + ) + .await?; + if let Some(raw) = raw { + Ok(Some(proxied_ports_key.deserialize(&raw).map_err(|x| { + fdb::FdbBindingError::CustomError(x.into()) + })?)) + } else { + Ok(None) + } } }) .custom_instrument(tracing::info_span!("fetch_proxied_ports_tx")) diff --git a/packages/edge/infra/guard/server/src/routing/actor_routes.rs b/packages/edge/infra/guard/server/src/routing/actor_routes.rs index 066b5a7bc0..45b86d59a7 100644 --- a/packages/edge/infra/guard/server/src/routing/actor_routes.rs +++ b/packages/edge/infra/guard/server/src/routing/actor_routes.rs @@ -154,7 +154,7 @@ pub async fn route_via_route_config( #[tracing::instrument(skip_all)] async fn find_actor_targets( ctx: &StandaloneCtx, - actor_id: &Uuid, + actor_id: &util::Id, _dc_id: Uuid, // Unused but kept for API compatibility path_to_forward: &str, ) -> GlobalResult>> { @@ -163,22 +163,47 @@ async fn find_actor_targets( .fdb() .await? 
.run(|tx, _mc| async move { - // NOTE: This is not SERIALIZABLE because we don't want to conflict with port updates - // and its not important if its slightly stale - let proxied_ports_key = pegboard::keys::actor::ProxiedPortsKey::new(*actor_id); - let raw = tx - .get( - &pegboard::keys::subspace().pack(&proxied_ports_key), - fdb_util::SNAPSHOT, - ) - .await?; - if let Some(raw) = raw { - let proxied_ports = proxied_ports_key - .deserialize(&raw) - .map_err(|x| foundationdb::FdbBindingError::CustomError(x.into()))?; - Ok(Some(proxied_ports)) + if let Some(actor_id) = actor_id.as_v0() { + // NOTE: This is not SERIALIZABLE because we don't want to conflict with port updates + // and its not important if its slightly stale + let proxied_ports_key = pegboard::keys::actor::ProxiedPortsKey::new(actor_id); + let raw = tx + .get( + &pegboard::keys::subspace().pack(&proxied_ports_key), + fdb_util::SNAPSHOT, + ) + .await?; + if let Some(raw) = raw { + let proxied_ports = proxied_ports_key + .deserialize(&raw) + .map_err(|x| foundationdb::FdbBindingError::CustomError(x.into()))?; + Ok(Some( + proxied_ports + .into_iter() + .map(Into::into) + .collect::>(), + )) + } else { + Ok(None) + } } else { - Ok(None) + // NOTE: This is not SERIALIZABLE because we don't want to conflict with port updates + // and its not important if its slightly stale + let proxied_ports_key = pegboard::keys::actor2::ProxiedPortsKey::new(*actor_id); + let raw = tx + .get( + &pegboard::keys::subspace().pack(&proxied_ports_key), + fdb_util::SNAPSHOT, + ) + .await?; + if let Some(raw) = raw { + let proxied_ports = proxied_ports_key + .deserialize(&raw) + .map_err(|x| foundationdb::FdbBindingError::CustomError(x.into()))?; + Ok(Some(proxied_ports)) + } else { + Ok(None) + } } }) .await?; diff --git a/packages/edge/services/pegboard/db/runner-log/migrations/20200101000000_init.up.sql b/packages/edge/services/pegboard/db/runner-log/migrations/20200101000000_init.up.sql index 287855eb37..f9b2d5dd6a 100644 --- 
a/packages/edge/services/pegboard/db/runner-log/migrations/20200101000000_init.up.sql +++ b/packages/edge/services/pegboard/db/runner-log/migrations/20200101000000_init.up.sql @@ -1,7 +1,6 @@ - CREATE TABLE IF NOT EXISTS runner_logs ( runner_id UUID, - actor_id UUID, -- When not set will be the NIL UUID (all zeros) + actor_id String, stream_type UInt8, -- pegboard::types::LogsStreamType ts DateTime64 (9), message String diff --git a/packages/edge/services/pegboard/src/keys/actor2.rs b/packages/edge/services/pegboard/src/keys/actor2.rs new file mode 100644 index 0000000000..cf7cb9e6d5 --- /dev/null +++ b/packages/edge/services/pegboard/src/keys/actor2.rs @@ -0,0 +1,189 @@ +use std::result::Result::Ok; + +use anyhow::*; +use chirp_workflow::prelude::*; +use fdb_util::prelude::*; + +use crate::types::GameGuardProtocol; + +#[derive(Debug)] +pub struct CreateTsKey { + actor_id: util::Id, +} + +impl CreateTsKey { + pub fn new(actor_id: util::Id) -> Self { + CreateTsKey { actor_id } + } +} + +impl FormalKey for CreateTsKey { + // Timestamp. 
+ type Value = i64; + + fn deserialize(&self, raw: &[u8]) -> Result { + Ok(i64::from_be_bytes(raw.try_into()?)) + } + + fn serialize(&self, value: Self::Value) -> Result> { + Ok(value.to_be_bytes().to_vec()) + } +} + +impl TuplePack for CreateTsKey { + fn pack( + &self, + w: &mut W, + tuple_depth: TupleDepth, + ) -> std::io::Result { + let t = (ACTOR2, DATA, self.actor_id, CREATE_TS); + t.pack(w, tuple_depth) + } +} + +impl<'de> TupleUnpack<'de> for CreateTsKey { + fn unpack(input: &[u8], tuple_depth: TupleDepth) -> PackResult<(&[u8], Self)> { + let (input, (_, _, actor_id, _)) = + <(usize, usize, util::Id, usize)>::unpack(input, tuple_depth)?; + let v = CreateTsKey { actor_id }; + + Ok((input, v)) + } +} + +#[derive(Debug)] +pub struct WorkflowIdKey { + actor_id: util::Id, +} + +impl WorkflowIdKey { + pub fn new(actor_id: util::Id) -> Self { + WorkflowIdKey { actor_id } + } +} + +impl FormalKey for WorkflowIdKey { + type Value = Uuid; + + fn deserialize(&self, raw: &[u8]) -> Result { + Ok(Uuid::from_slice(raw)?) 
+ } + + fn serialize(&self, value: Self::Value) -> Result> { + Ok(value.as_bytes().to_vec()) + } +} + +impl TuplePack for WorkflowIdKey { + fn pack( + &self, + w: &mut W, + tuple_depth: TupleDepth, + ) -> std::io::Result { + let t = (ACTOR2, DATA, self.actor_id, WORKFLOW_ID); + t.pack(w, tuple_depth) + } +} + +impl<'de> TupleUnpack<'de> for WorkflowIdKey { + fn unpack(input: &[u8], tuple_depth: TupleDepth) -> PackResult<(&[u8], Self)> { + let (input, (_, _, actor_id, _)) = + <(usize, usize, util::Id, usize)>::unpack(input, tuple_depth)?; + + let v = WorkflowIdKey { actor_id }; + + Ok((input, v)) + } +} + +#[derive(Debug)] +pub struct ProxiedPortsKey { + pub actor_id: util::Id, +} + +impl ProxiedPortsKey { + pub fn new(actor_id: util::Id) -> Self { + ProxiedPortsKey { actor_id } + } + + pub fn subspace() -> ProxiedPortsSubspaceKey { + ProxiedPortsSubspaceKey::new() + } +} + +impl FormalKey for ProxiedPortsKey { + type Value = Vec; + + fn deserialize(&self, raw: &[u8]) -> Result { + serde_json::from_slice(raw).map_err(Into::into) + } + + fn serialize(&self, value: Self::Value) -> Result> { + serde_json::to_vec(&value).map_err(Into::into) + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct ProxiedPort { + pub port_name: String, + pub create_ts: i64, + pub lan_hostname: String, + pub source: u16, + pub ingress_port_number: u16, + pub protocol: GameGuardProtocol, +} + +impl From for ProxiedPort { + fn from(value: super::actor::ProxiedPort) -> Self { + ProxiedPort { + port_name: value.port_name, + create_ts: value.create_ts, + lan_hostname: value.lan_hostname, + source: value.source, + ingress_port_number: value.ingress_port_number, + protocol: value.protocol, + } + } +} + +impl TuplePack for ProxiedPortsKey { + fn pack( + &self, + w: &mut W, + tuple_depth: TupleDepth, + ) -> std::io::Result { + let t = (ACTOR2, DATA, PORT, PROXIED, self.actor_id); + t.pack(w, tuple_depth) + } +} + +impl<'de> TupleUnpack<'de> for ProxiedPortsKey { + fn unpack(input: 
&[u8], tuple_depth: TupleDepth) -> PackResult<(&[u8], Self)> { + let (input, (_, _, _, _, actor_id)) = + <(usize, usize, usize, usize, util::Id)>::unpack(input, tuple_depth)?; + let v = ProxiedPortsKey { actor_id }; + + Ok((input, v)) + } +} + +/// All proxied ports are stored under a subspace separately from the actor data subspace because +/// we need to be able to efficiently list it for Rivet Guard. +pub struct ProxiedPortsSubspaceKey {} + +impl ProxiedPortsSubspaceKey { + pub fn new() -> Self { + ProxiedPortsSubspaceKey {} + } +} + +impl TuplePack for ProxiedPortsSubspaceKey { + fn pack( + &self, + w: &mut W, + tuple_depth: TupleDepth, + ) -> std::io::Result { + let t = (ACTOR2, DATA, PORT, PROXIED); + t.pack(w, tuple_depth) + } +} diff --git a/packages/edge/services/pegboard/src/keys/client.rs b/packages/edge/services/pegboard/src/keys/client.rs index 15347d5771..39a247962a 100644 --- a/packages/edge/services/pegboard/src/keys/client.rs +++ b/packages/edge/services/pegboard/src/keys/client.rs @@ -328,6 +328,87 @@ impl TuplePack for ActorSubspaceKey { } } +#[derive(Debug)] +pub struct Actor2Key { + client_id: Uuid, + pub actor_id: util::Id, +} + +impl Actor2Key { + pub fn new(client_id: Uuid, actor_id: util::Id) -> Self { + Actor2Key { + client_id, + actor_id, + } + } + + pub fn subspace(client_id: Uuid) -> Actor2SubspaceKey { + Actor2SubspaceKey::new(client_id) + } +} + +impl FormalKey for Actor2Key { + /// Generation.
+ type Value = u32; + + fn deserialize(&self, raw: &[u8]) -> Result { + if raw.is_empty() { + Ok(0) + } else { + Ok(u32::from_be_bytes(raw.try_into()?)) + } + } + + fn serialize(&self, value: Self::Value) -> Result> { + Ok(value.to_be_bytes().to_vec()) + } +} + +impl TuplePack for Actor2Key { + fn pack( + &self, + w: &mut W, + tuple_depth: TupleDepth, + ) -> std::io::Result { + let t = (CLIENT, ACTOR2, self.client_id, self.actor_id); + t.pack(w, tuple_depth) + } +} + +impl<'de> TupleUnpack<'de> for Actor2Key { + fn unpack(input: &[u8], tuple_depth: TupleDepth) -> PackResult<(&[u8], Self)> { + let (input, (_, _, client_id, actor_id)) = + <(usize, usize, Uuid, util::Id)>::unpack(input, tuple_depth)?; + let v = Actor2Key { + client_id, + actor_id, + }; + + Ok((input, v)) + } +} + +pub struct Actor2SubspaceKey { + client_id: Uuid, +} + +impl Actor2SubspaceKey { + fn new(client_id: Uuid) -> Self { + Actor2SubspaceKey { client_id } + } +} + +impl TuplePack for Actor2SubspaceKey { + fn pack( + &self, + w: &mut W, + tuple_depth: TupleDepth, + ) -> std::io::Result { + let t = (CLIENT, ACTOR2, self.client_id); + t.pack(w, tuple_depth) + } +} + #[derive(Debug)] pub struct WorkflowIdKey { client_id: Uuid, diff --git a/packages/edge/services/pegboard/src/keys/env.rs b/packages/edge/services/pegboard/src/keys/env.rs index f4a1b9d42f..7198cc653a 100644 --- a/packages/edge/services/pegboard/src/keys/env.rs +++ b/packages/edge/services/pegboard/src/keys/env.rs @@ -95,3 +95,117 @@ impl TuplePack for ActorSubspaceKey { t.pack(w, tuple_depth) } } + +#[derive(Debug)] +pub struct Actor2Key { + environment_id: Uuid, + pub create_ts: i64, + pub actor_id: util::Id, +} + +impl Actor2Key { + pub fn new(environment_id: Uuid, create_ts: i64, actor_id: util::Id) -> Self { + Actor2Key { + environment_id, + create_ts, + actor_id, + } + } + + pub fn subspace(environment_id: Uuid) -> Actor2SubspaceKey { + Actor2SubspaceKey::new(environment_id) + } + + pub fn subspace_with_create_ts(environment_id: 
Uuid, create_ts: i64) -> Actor2SubspaceKey { + Actor2SubspaceKey::new_with_create_ts(environment_id, create_ts) + } +} + +impl FormalKey for Actor2Key { + type Value = Actor2KeyData; + + fn deserialize(&self, raw: &[u8]) -> Result { + serde_json::from_slice(raw).map_err(Into::into) + } + + fn serialize(&self, value: Self::Value) -> Result> { + serde_json::to_vec(&value).map_err(Into::into) + } +} + +impl TuplePack for Actor2Key { + fn pack( + &self, + w: &mut W, + tuple_depth: TupleDepth, + ) -> std::io::Result { + let t = ( + ENV, + self.environment_id, + ACTOR2, + self.create_ts, + self.actor_id, + ); + t.pack(w, tuple_depth) + } +} + +impl<'de> TupleUnpack<'de> for Actor2Key { + fn unpack(input: &[u8], tuple_depth: TupleDepth) -> PackResult<(&[u8], Self)> { + let (input, (_, environment_id, _, create_ts, actor_id)) = + <(usize, Uuid, usize, i64, util::Id)>::unpack(input, tuple_depth)?; + let v = Actor2Key { + environment_id, + create_ts, + actor_id, + }; + + Ok((input, v)) + } +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct Actor2KeyData { + pub is_destroyed: bool, + pub tags: Vec<(String, String)>, +} + +pub struct Actor2SubspaceKey { + environment_id: Uuid, + create_ts: Option, +} + +impl Actor2SubspaceKey { + pub fn new(environment_id: Uuid) -> Self { + Actor2SubspaceKey { + environment_id, + create_ts: None, + } + } + + pub fn new_with_create_ts(environment_id: Uuid, create_ts: i64) -> Self { + Actor2SubspaceKey { + environment_id, + create_ts: Some(create_ts), + } + } +} + +impl TuplePack for Actor2SubspaceKey { + fn pack( + &self, + w: &mut W, + tuple_depth: TupleDepth, + ) -> std::io::Result { + let mut offset = VersionstampOffset::None { size: 0 }; + + let t = (ENV, self.environment_id, ACTOR2); + offset += t.pack(w, tuple_depth)?; + + if let Some(create_ts) = &self.create_ts { + offset += create_ts.pack(w, tuple_depth)?; + } + + Ok(offset) + } +} diff --git a/packages/edge/services/pegboard/src/keys/mod.rs 
b/packages/edge/services/pegboard/src/keys/mod.rs index 0c0d3a6011..27994acf65 100644 --- a/packages/edge/services/pegboard/src/keys/mod.rs +++ b/packages/edge/services/pegboard/src/keys/mod.rs @@ -1,6 +1,7 @@ use fdb_util::prelude::*; pub mod actor; +pub mod actor2; pub mod client; pub mod datacenter; pub mod env; diff --git a/packages/edge/services/pegboard/src/keys/port.rs b/packages/edge/services/pegboard/src/keys/port.rs index d43dc49e29..de0c806bc1 100644 --- a/packages/edge/services/pegboard/src/keys/port.rs +++ b/packages/edge/services/pegboard/src/keys/port.rs @@ -10,24 +10,58 @@ use crate::types::GameGuardProtocol; pub struct IngressKey { protocol: GameGuardProtocol, pub port: u16, - pub server_id: Uuid, + pub actor_id: Uuid, } impl IngressKey { - pub fn new(protocol: GameGuardProtocol, port: u16, server_id: Uuid) -> Self { + pub fn new(protocol: GameGuardProtocol, port: u16, actor_id: Uuid) -> Self { IngressKey { protocol, port, - server_id, + actor_id, } } +} + +impl TuplePack for IngressKey { + fn pack( + &self, + w: &mut W, + tuple_depth: TupleDepth, + ) -> std::io::Result { + let t = ( + PORT, + INGRESS, + self.protocol as usize, + self.port, + self.actor_id, + ); + t.pack(w, tuple_depth) + } +} + +#[derive(Debug)] +pub struct IngressKey2 { + protocol: GameGuardProtocol, + pub port: u16, + pub actor_id: util::Id, +} - pub fn subspace(protocol: GameGuardProtocol, port: u16) -> IngressSubspaceKey { - IngressSubspaceKey::new(protocol, port) +impl IngressKey2 { + pub fn new(protocol: GameGuardProtocol, port: u16, actor_id: util::Id) -> Self { + IngressKey2 { + protocol, + port, + actor_id, + } + } + + pub fn subspace(protocol: GameGuardProtocol, port: u16) -> IngressSubspaceKey2 { + IngressSubspaceKey2::new(protocol, port) } } -impl FormalKey for IngressKey { +impl FormalKey for IngressKey2 { type Value = (); fn deserialize(&self, _raw: &[u8]) -> Result { @@ -39,7 +73,7 @@ impl FormalKey for IngressKey { } } -impl TuplePack for IngressKey { +impl 
TuplePack for IngressKey2 { fn pack( &self, w: &mut W, @@ -50,43 +84,53 @@ impl TuplePack for IngressKey { INGRESS, self.protocol as usize, self.port, - self.server_id, + self.actor_id, ); t.pack(w, tuple_depth) } } -impl<'de> TupleUnpack<'de> for IngressKey { +impl<'de> TupleUnpack<'de> for IngressKey2 { fn unpack(input: &[u8], tuple_depth: TupleDepth) -> PackResult<(&[u8], Self)> { - let (input, (_, _, protocol_variant, port, server_id)) = - <(usize, usize, usize, u16, Uuid)>::unpack(input, tuple_depth)?; + // First parse as id, then uuid + let (input, (_, _, protocol_variant, port, actor_id)) = + if let Ok(res) = <(usize, usize, usize, u16, util::Id)>::unpack(input, tuple_depth) { + res + } else { + let (input, (_a, _b, protocol_variant, port, actor_id)) = + <(usize, usize, usize, u16, Uuid)>::unpack(input, tuple_depth)?; + ( + input, + (_a, _b, protocol_variant, port, util::Id::from(actor_id)), + ) + }; let protocol = GameGuardProtocol::from_repr(protocol_variant).ok_or_else(|| { PackError::Message( format!("invalid game guard protocol variant `{protocol_variant}` in key").into(), ) })?; - let v = IngressKey { + let v = IngressKey2 { protocol, port, - server_id, + actor_id, }; Ok((input, v)) } } -pub struct IngressSubspaceKey { +pub struct IngressSubspaceKey2 { protocol: GameGuardProtocol, port: u16, } -impl IngressSubspaceKey { +impl IngressSubspaceKey2 { pub fn new(protocol: GameGuardProtocol, port: u16) -> Self { - IngressSubspaceKey { protocol, port } + IngressSubspaceKey2 { protocol, port } } } -impl TuplePack for IngressSubspaceKey { +impl TuplePack for IngressSubspaceKey2 { fn pack( &self, w: &mut W, diff --git a/packages/edge/services/pegboard/src/ops/actor/allocate_ingress_ports.rs b/packages/edge/services/pegboard/src/ops/actor/allocate_ingress_ports.rs index 1152c695d7..867f99e6b3 100644 --- a/packages/edge/services/pegboard/src/ops/actor/allocate_ingress_ports.rs +++ b/packages/edge/services/pegboard/src/ops/actor/allocate_ingress_ports.rs @@ -10,9 
+10,9 @@ use rand::Rng; use crate::{keys, types::GameGuardProtocol}; /// Allocates X ingress ports per given protocol uniquely in the global FDB index. -#[derive(Debug, Default)] +#[derive(Debug)] pub(crate) struct Input { - pub actor_id: Uuid, + pub actor_id: util::Id, /// How many ports of each protocol are needed. pub ports: Vec<(GameGuardProtocol, usize)>, } @@ -65,11 +65,11 @@ pub(crate) async fn pegboard_actor_allocate_ingress_ports( // Build start and end keys for ingress ports subspace let start_key = keys::subspace() - .subspace(&keys::port::IngressKey::subspace(*protocol, start)) + .subspace(&keys::port::IngressKey2::subspace(*protocol, start)) .range() .0; let end_key = keys::subspace() - .subspace(&keys::port::IngressKey::subspace( + .subspace(&keys::port::IngressKey2::subspace( *protocol, *port_range.end(), )) @@ -98,7 +98,7 @@ pub(crate) async fn pegboard_actor_allocate_ingress_ports( start = *port_range.start(); let start_key = keys::subspace() - .subspace(&keys::port::IngressKey::subspace( + .subspace(&keys::port::IngressKey2::subspace( *protocol, start, )) .range() @@ -129,7 +129,7 @@ pub(crate) async fn pegboard_actor_allocate_ingress_ports( }; let key = keys::subspace() - .unpack::(entry.key()) + .unpack::(entry.key()) .map_err(|x| fdb::FdbBindingError::CustomError(x.into()))?; let current_port = key.port; @@ -150,7 +150,7 @@ pub(crate) async fn pegboard_actor_allocate_ingress_ports( }; let ingress_port_key = - keys::port::IngressKey::new(*protocol, port, input.actor_id); + keys::port::IngressKey2::new(*protocol, port, input.actor_id); let ingress_port_key_buf = keys::subspace().pack(&ingress_port_key); // Add read conflict only for this key diff --git a/packages/edge/services/pegboard/src/ops/actor/get.rs b/packages/edge/services/pegboard/src/ops/actor/get.rs index 765d4aac34..40f2e65275 100644 --- a/packages/edge/services/pegboard/src/ops/actor/get.rs +++ b/packages/edge/services/pegboard/src/ops/actor/get.rs @@ -56,7 +56,7 @@ pub(crate) struct 
PortProxied { } struct ActorData { - actor_id: Uuid, + actor_id: util::Id, row: ActorRow, port_ingress_rows: Vec, port_host_rows: Vec, @@ -65,7 +65,7 @@ struct ActorData { #[derive(Debug)] pub struct Input { - pub actor_ids: Vec, + pub actor_ids: Vec, /// If null, will fall back to the default endpoint type for the datacenter. /// @@ -91,18 +91,35 @@ pub async fn pegboard_actor_get(ctx: &OperationCtx, input: &Input) -> GlobalResu .map(|actor_id| { let tx = tx.clone(); async move { - let workflow_id_key = keys::actor::WorkflowIdKey::new(actor_id); - let workflow_id_entry = tx - .get(&keys::subspace().pack(&workflow_id_key), SERIALIZABLE) - .await?; + let workflow_id = if let Some(actor_id) = actor_id.as_v0() { + let workflow_id_key = keys::actor::WorkflowIdKey::new(actor_id); - let Some(workflow_id_entry) = workflow_id_entry else { - return Ok(None); - }; + let workflow_id_entry = tx + .get(&keys::subspace().pack(&workflow_id_key), SERIALIZABLE) + .await?; + + let Some(workflow_id_entry) = workflow_id_entry else { + return Ok(None); + }; + + workflow_id_key + .deserialize(&workflow_id_entry) + .map_err(|x| fdb::FdbBindingError::CustomError(x.into()))? + } else { + let workflow_id_key = keys::actor2::WorkflowIdKey::new(actor_id); - let workflow_id = workflow_id_key - .deserialize(&workflow_id_entry) - .map_err(|x| fdb::FdbBindingError::CustomError(x.into()))?; + let workflow_id_entry = tx + .get(&keys::subspace().pack(&workflow_id_key), SERIALIZABLE) + .await?; + + let Some(workflow_id_entry) = workflow_id_entry else { + return Ok(None); + }; + + workflow_id_key + .deserialize(&workflow_id_entry) + .map_err(|x| fdb::FdbBindingError::CustomError(x.into()))? 
+ }; Ok(Some((actor_id, workflow_id))) } @@ -311,7 +328,7 @@ pub async fn pegboard_actor_get(ctx: &OperationCtx, input: &Input) -> GlobalResu } pub(crate) fn create_port_ingress( - actor_id: Uuid, + actor_id: util::Id, port: &PortIngress, protocol: GameGuardProtocol, endpoint_type: EndpointType, diff --git a/packages/edge/services/pegboard/src/ops/actor/list_for_env.rs b/packages/edge/services/pegboard/src/ops/actor/list_for_env.rs index 5423516321..6b919c2563 100644 --- a/packages/edge/services/pegboard/src/ops/actor/list_for_env.rs +++ b/packages/edge/services/pegboard/src/ops/actor/list_for_env.rs @@ -23,7 +23,7 @@ pub struct Output { #[derive(Debug)] pub struct ActorEntry { - pub actor_id: Uuid, + pub actor_id: util::Id, pub create_ts: i64, } @@ -36,6 +36,59 @@ pub async fn pegboard_actor_list_for_env( .fdb() .await? .run(|tx, _mc| async move { + // Read from actor2 first + let actor2_subspace = + keys::subspace().subspace(&keys::env::Actor2Key::subspace(input.env_id)); + let (start2, end2) = actor2_subspace.range(); + + let end2 = if let Some(created_before) = input.created_before { + fdb_util::end_of_key_range(&keys::subspace().pack( + &keys::env::Actor2Key::subspace_with_create_ts(input.env_id, created_before), + )) + } else { + end2 + }; + + let mut stream2 = tx.get_ranges_keyvalues( + fdb::RangeOption { + mode: StreamingMode::Iterator, + reverse: true, + ..(start2, end2).into() + }, + // NOTE: Does not have to be serializable because we are listing, stale data does not matter + SNAPSHOT, + ); + let mut results = Vec::new(); + + while let Some(entry) = stream2.try_next().await? 
{ + let actor_key = keys::subspace() + .unpack::(entry.key()) + .map_err(|x| fdb::FdbBindingError::CustomError(x.into()))?; + let data = actor_key + .deserialize(entry.value()) + .map_err(|x| fdb::FdbBindingError::CustomError(x.into()))?; + + if input.include_destroyed || !data.is_destroyed { + // Compute intersection between actor tags and input tags + let tags_match = input + .tags + .iter() + .all(|(k, v)| data.tags.iter().any(|(k2, v2)| k == k2 && v == v2)); + + if tags_match { + results.push(ActorEntry { + actor_id: actor_key.actor_id, + create_ts: actor_key.create_ts, + }); + + if results.len() == input.limit { + break; + } + } + } + } + + // Read from old actors let actor_subspace = keys::subspace().subspace(&keys::env::ActorKey::subspace(input.env_id)); let (start, end) = actor_subspace.range(); @@ -59,7 +112,6 @@ pub async fn pegboard_actor_list_for_env( // NOTE: Does not have to be serializable because we are listing, stale data does not matter SNAPSHOT, ); - let mut results = Vec::new(); while let Some(entry) = stream.try_next().await? 
{ let actor_key = keys::subspace() @@ -70,7 +122,7 @@ .map_err(|x| fdb::FdbBindingError::CustomError(x.into()))?; if input.include_destroyed || !data.is_destroyed { - // Compute intersection between actor tags and input tags + // Compute intersection between actor tags and input tags let tags_match = input .tags .iter() @@ -78,7 +130,7 @@ if tags_match { results.push(ActorEntry { - actor_id: actor_key.actor_id, + actor_id: actor_key.actor_id.into(), create_ts: actor_key.create_ts, }); diff --git a/packages/edge/services/pegboard/src/ops/actor/log/read.rs b/packages/edge/services/pegboard/src/ops/actor/log/read.rs index e4a3918d41..90d2dc1bc1 100644 --- a/packages/edge/services/pegboard/src/ops/actor/log/read.rs +++ b/packages/edge/services/pegboard/src/ops/actor/log/read.rs @@ -5,7 +5,7 @@ use crate::types::LogsStreamType; #[derive(Debug)] pub struct Input { pub env_id: Uuid, - pub actor_ids: Vec, + pub actor_ids: Vec, pub stream_types: Vec, pub count: i64, pub order_by: Order, @@ -35,21 +35,12 @@ pub struct Output { } #[derive(Debug, clickhouse::Row, serde::Deserialize)] -pub struct LogEntryRow { - /// In nanoseconds. - pub ts: i64, - pub message: Vec, - pub stream_type: u8, - pub actor_id_str: String, -} - -#[derive(Debug)] pub struct LogEntry { /// In nanoseconds. pub ts: i64, pub message: Vec, pub stream_type: u8, - pub actor_id: Uuid, + pub actor_id: String, } #[operation] @@ -129,8 +120,7 @@ pub async fn pegboard_actor_log_read(ctx: &OperationCtx, input: &Input) -> Globa ) -- Use dynamic direction directly in the ORDER BY clause ORDER BY ts {order_direction} - LIMIT - ? + LIMIT ?
" ); @@ -143,7 +133,7 @@ pub async fn pegboard_actor_log_read(ctx: &OperationCtx, input: &Input) -> Globa .bind(&ctx.config().server()?.rivet.namespace) .bind(input.env_id) .bind(&actor_id_strings) - .bind(stream_type_values) + .bind(&stream_type_values) // Query type parameters .bind(is_all) .bind(is_before) @@ -157,30 +147,17 @@ pub async fn pegboard_actor_log_read(ctx: &OperationCtx, input: &Input) -> Globa // Search parameters .bind(apply_search) .bind(enable_regex) - .bind(regex_text) + .bind(®ex_text) .bind(case_sensitive) .bind(search_text) .bind(search_text.to_lowercase()) // Limit .bind(input.count); - let entries = query_builder - .fetch_all::() - .await - .map_err(|err| GlobalError::from(err))? - .into_iter() - .map(|x| { - Ok(LogEntry { - ts: x.ts, - message: x.message, - stream_type: x.stream_type, - actor_id: unwrap!( - Uuid::parse_str(&x.actor_id_str).ok(), - "invalid actor log entry uuid" - ), - }) - }) - .collect::>>()?; - - Ok(Output { entries }) + let entries = query_builder.fetch_all::().await?; + + // New actors first + Ok(Output { + entries, + }) } diff --git a/packages/edge/services/pegboard/src/protocol.rs b/packages/edge/services/pegboard/src/protocol.rs index 61ee4927c6..94bbea7eef 100644 --- a/packages/edge/services/pegboard/src/protocol.rs +++ b/packages/edge/services/pegboard/src/protocol.rs @@ -83,13 +83,13 @@ pub struct CommandWrapper { #[serde(rename_all = "snake_case")] pub enum Command { StartActor { - actor_id: Uuid, + actor_id: util::Id, #[serde(default)] generation: u32, config: Box, }, SignalActor { - actor_id: Uuid, + actor_id: util::Id, #[serde(default)] generation: u32, // See nix::sys::signal::Signal @@ -246,7 +246,7 @@ pub struct ActorMetadata { #[derive(Debug, Serialize, Deserialize, Clone, Hash)] pub struct ActorMetadataActor { - pub actor_id: Uuid, + pub actor_id: util::Id, pub tags: HashableMap, pub create_ts: i64, } @@ -324,7 +324,7 @@ pub struct EventWrapper { #[serde(rename_all = "snake_case")] pub enum Event { 
ActorStateUpdate { - actor_id: Uuid, + actor_id: util::Id, #[serde(default)] generation: u32, state: ActorState, diff --git a/packages/edge/services/pegboard/src/types.rs b/packages/edge/services/pegboard/src/types.rs index daf7632ca1..da7490ad35 100644 --- a/packages/edge/services/pegboard/src/types.rs +++ b/packages/edge/services/pegboard/src/types.rs @@ -9,16 +9,18 @@ use strum::FromRepr; #[derive(Debug, Clone)] pub struct Actor { - pub actor_id: Uuid, + pub actor_id: util::Id, pub env_id: Uuid, pub tags: HashMap, - pub resources: Option, - pub lifecycle: ActorLifecycle, + pub image_id: Uuid, + pub create_ts: i64, pub start_ts: Option, pub connectable_ts: Option, pub destroy_ts: Option, - pub image_id: Uuid, + + pub lifecycle: ActorLifecycle, + pub resources: Option, pub args: Vec, pub network_mode: NetworkMode, pub environment: HashMap, @@ -155,7 +157,7 @@ pub fn convert_actor_to_api( datacenter: &cluster::types::Datacenter, ) -> GlobalResult { Ok(models::ActorsActor { - id: value.actor_id, + id: value.actor_id.to_string(), region: datacenter.name_id.clone(), created_at: util::timestamp::to_string(value.create_ts)?, // `started_at` -> `connectable_ts` is intentional. 
We don't expose the internal diff --git a/packages/edge/services/pegboard/src/util.rs b/packages/edge/services/pegboard/src/util.rs index cae08c7ac6..44255390ad 100644 --- a/packages/edge/services/pegboard/src/util.rs +++ b/packages/edge/services/pegboard/src/util.rs @@ -9,7 +9,7 @@ const UUID_PATTERN: &str = r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0- const PORT_NAME_PATTERN: &str = r"[a-zA-Z0-9-_]+"; pub fn build_actor_hostname_and_path( - actor_id: Uuid, + actor_id: util::Id, port_name: &str, protocol: GameGuardProtocol, endpoint_type: EndpointType, diff --git a/packages/edge/services/pegboard/src/workflows/actor/destroy.rs b/packages/edge/services/pegboard/src/workflows/actor/destroy.rs index 07b73a1e1f..4bf977c07b 100644 --- a/packages/edge/services/pegboard/src/workflows/actor/destroy.rs +++ b/packages/edge/services/pegboard/src/workflows/actor/destroy.rs @@ -188,21 +188,23 @@ pub(crate) async fn clear_ports_and_resources( ) -> Result<(), fdb::FdbBindingError> { // Remove all allocated ingress ports for (protocol, port) in ingress_ports { - let ingress_port_key = keys::port::IngressKey::new( - GameGuardProtocol::from_repr( - usize::try_from(protocol) - .map_err(|x| fdb::FdbBindingError::CustomError(x.into()))?, + let protocol = GameGuardProtocol::from_repr( + usize::try_from(protocol).map_err(|x| fdb::FdbBindingError::CustomError(x.into()))?, + ) + .ok_or_else(|| { + fdb::FdbBindingError::CustomError( + format!("invalid protocol variant: {protocol}").into(), ) - .ok_or_else(|| { - fdb::FdbBindingError::CustomError( - format!("invalid protocol variant: {protocol}").into(), - ) - })?, - u16::try_from(port).map_err(|x| fdb::FdbBindingError::CustomError(x.into()))?, - actor_id, - ); + })?; + let port = u16::try_from(port).map_err(|x| fdb::FdbBindingError::CustomError(x.into()))?; + + let ingress_port_key = keys::port::IngressKey::new(protocol, port, actor_id); tx.clear(&keys::subspace().pack(&ingress_port_key)); + + let ingress_port_key2 = 
keys::port::IngressKey::new(protocol, port, actor_id.into()); + + tx.clear(&keys::subspace().pack(&ingress_port_key2)); } // Remove proxied ports @@ -345,7 +347,7 @@ pub(crate) async fn kill( ) -> GlobalResult<()> { if kill_timeout_ms != 0 { ctx.signal(protocol::Command::SignalActor { - actor_id, + actor_id: actor_id.into(), generation, signal: Signal::SIGTERM as i32, persist_storage, @@ -359,7 +361,7 @@ pub(crate) async fn kill( } ctx.signal(protocol::Command::SignalActor { - actor_id, + actor_id: actor_id.into(), generation, signal: Signal::SIGKILL as i32, persist_storage, diff --git a/packages/edge/services/pegboard/src/workflows/actor/runtime.rs b/packages/edge/services/pegboard/src/workflows/actor/runtime.rs index f43e85b53d..53baa814c0 100644 --- a/packages/edge/services/pegboard/src/workflows/actor/runtime.rs +++ b/packages/edge/services/pegboard/src/workflows/actor/runtime.rs @@ -167,7 +167,7 @@ async fn fetch_ports(ctx: &ActivityCtx, input: &FetchPortsInput) -> GlobalResult .into_iter() .map(|row| { let port = get::create_port_ingress( - input.actor_id, + input.actor_id.into(), &row, unwrap!(GameGuardProtocol::from_repr(row.protocol.try_into()?)), endpoint_type, @@ -679,7 +679,7 @@ pub async fn spawn_actor( let cluster_id = ctx.config().server()?.rivet.edge()?.cluster_id; ctx.signal(protocol::Command::StartActor { - actor_id: input.actor_id, + actor_id: input.actor_id.into(), generation, config: Box::new(protocol::ActorConfig { image: protocol::Image { @@ -693,7 +693,7 @@ pub async fn spawn_actor( allocation_type: protocol::ImageAllocationType::Single, }, root_user_enabled: input.root_user_enabled, - env: input.environment.as_hashable(), + env: input.environment.clone(), runner: None, ports: ports_res .ports @@ -733,7 +733,7 @@ pub async fn spawn_actor( resources: actor_setup.resources.clone(), metadata: util::serde::Raw::new(&protocol::ActorMetadata { actor: protocol::ActorMetadataActor { - actor_id: input.actor_id, + actor_id: input.actor_id.into(), 
tags: input.tags.clone(), create_ts: ctx.ts(), }, diff --git a/packages/edge/services/pegboard/src/workflows/actor/setup.rs b/packages/edge/services/pegboard/src/workflows/actor/setup.rs index 2a8bd5892f..90d4df9557 100644 --- a/packages/edge/services/pegboard/src/workflows/actor/setup.rs +++ b/packages/edge/services/pegboard/src/workflows/actor/setup.rs @@ -335,7 +335,7 @@ async fn insert_db(ctx: &ActivityCtx, input: &InsertDbInput) -> GlobalResult, /// Whether or not to send signals to the pb actor. In the case that the actor was already stopped @@ -128,7 +128,7 @@ async fn update_db( #[derive(Debug, Serialize, Deserialize, Hash)] pub struct UpdateFdbInput { - actor_id: Uuid, + actor_id: util::Id, image_id: Uuid, build_allocation_type: Option, actor: UpdateDbOutput, @@ -162,12 +162,12 @@ pub async fn update_fdb( let ingress_ports = ingress_ports.clone(); async move { // Update actor key index in env subspace - let actor_key = keys::env::ActorKey::new( + let actor_key = keys::env::Actor2Key::new( input.actor.env_id, input.actor.create_ts, input.actor_id, ); - let data = keys::env::ActorKeyData { + let data = keys::env::Actor2KeyData { is_destroyed: true, tags: input.actor.tags.0.clone().into_iter().collect(), }; @@ -202,7 +202,7 @@ pub async fn update_fdb( // TODO: Clean up args /// Clears allocated ports and resources (if they were allocated). 
pub(crate) async fn clear_ports_and_resources( - actor_id: Uuid, + actor_id: util::Id, image_id: Uuid, build_allocation_type: Option, ingress_ports: Vec<(i64, i64)>, @@ -215,7 +215,7 @@ pub(crate) async fn clear_ports_and_resources( ) -> Result { // Remove all allocated ingress ports for (protocol, port) in ingress_ports { - let ingress_port_key = keys::port::IngressKey::new( + let ingress_port_key = keys::port::IngressKey2::new( GameGuardProtocol::from_repr( usize::try_from(protocol) .map_err(|x| fdb::FdbBindingError::CustomError(x.into()))?, @@ -233,13 +233,13 @@ pub(crate) async fn clear_ports_and_resources( } // Remove proxied ports - let proxied_ports_key = keys::actor::ProxiedPortsKey::new(actor_id); + let proxied_ports_key = keys::actor2::ProxiedPortsKey::new(actor_id); tx.clear(&keys::subspace().pack(&proxied_ports_key)); if let Some(client_id) = client_id { // This is cleared when the state changes as well as when the actor is destroyed to ensure // consistency during rescheduling and forced deletion. 
- let actor_key = keys::client::ActorKey::new(client_id, actor_id); + let actor_key = keys::client::Actor2Key::new(client_id, actor_id); tx.clear(&keys::subspace().pack(&actor_key)); } @@ -448,7 +448,7 @@ pub(crate) async fn clear_ports_and_resources( pub(crate) async fn kill( ctx: &mut WorkflowCtx, - actor_id: Uuid, + actor_id: util::Id, generation: u32, client_workflow_id: Uuid, kill_timeout_ms: i64, diff --git a/packages/edge/services/pegboard/src/workflows/actor2/mod.rs b/packages/edge/services/pegboard/src/workflows/actor2/mod.rs index 561e5e6745..fe6c383843 100644 --- a/packages/edge/services/pegboard/src/workflows/actor2/mod.rs +++ b/packages/edge/services/pegboard/src/workflows/actor2/mod.rs @@ -29,7 +29,7 @@ const ACTOR_EXIT_THRESHOLD_MS: i64 = util::duration::seconds(5); #[derive(Clone, Debug, Serialize, Deserialize)] pub struct Input { - pub actor_id: Uuid, + pub actor_id: util::Id, pub env_id: Uuid, pub tags: HashMap, pub resources: Option, diff --git a/packages/edge/services/pegboard/src/workflows/actor2/runtime.rs b/packages/edge/services/pegboard/src/workflows/actor2/runtime.rs index adb3b5f4c5..3e0210f916 100644 --- a/packages/edge/services/pegboard/src/workflows/actor2/runtime.rs +++ b/packages/edge/services/pegboard/src/workflows/actor2/runtime.rs @@ -8,6 +8,7 @@ use foundationdb::{ options::{ConflictRangeType, StreamingMode}, }; use futures_util::{FutureExt, TryStreamExt}; +use nix::sys::signal::Signal; use sqlx::Acquire; use util::serde::AsHashableExt; @@ -91,7 +92,7 @@ async fn update_client_and_runner( input.client_id, input.client_workflow_id, input.runner_id, - client_wan_hostname, + &client_wan_hostname, ) .await?; @@ -100,7 +101,7 @@ async fn update_client_and_runner( #[derive(Debug, Serialize, Deserialize, Hash)] struct FetchPortsInput { - actor_id: Uuid, + actor_id: util::Id, endpoint_type: Option, } @@ -201,7 +202,7 @@ async fn fetch_ports(ctx: &ActivityCtx, input: &FetchPortsInput) -> GlobalResult #[derive(Debug, Serialize, 
Deserialize, Hash)] struct AllocateActorInput { - actor_id: Uuid, + actor_id: util::Id, generation: u32, image_id: Uuid, build_allocation_type: BuildAllocationType, @@ -304,7 +305,7 @@ async fn allocate_actor( // Insert actor index key let client_actor_key = - keys::client::ActorKey::new(data.client_id, input.actor_id); + keys::client::Actor2Key::new(data.client_id, input.actor_id); tx.set( &keys::subspace().pack(&client_actor_key), &client_actor_key @@ -477,7 +478,7 @@ async fn allocate_actor( } // Insert actor index key - let client_actor_key = keys::client::ActorKey::new( + let client_actor_key = keys::client::Actor2Key::new( old_client_allocation_key.client_id, input.actor_id, ); @@ -509,7 +510,7 @@ async fn allocate_actor( #[derive(Debug, Serialize, Deserialize, Hash)] pub struct UpdateFdbInput { - pub actor_id: Uuid, + pub actor_id: util::Id, pub client_id: Uuid, pub state: protocol::ActorState, } @@ -527,7 +528,7 @@ pub async fn update_fdb(ctx: &ActivityCtx, input: &UpdateFdbInput) -> GlobalResu // Was inserted when the actor was allocated. This is cleared when the state changes as // well as when the actor is destroyed to ensure consistency during rescheduling and // forced deletion. 
- let actor_key = keys::client::ActorKey::new(input.client_id, input.actor_id); + let actor_key = keys::client::Actor2Key::new(input.client_id, input.actor_id); tx.clear(&keys::subspace().pack(&actor_key)); Ok(()) @@ -657,7 +658,7 @@ pub async fn insert_ports(ctx: &ActivityCtx, input: &InsertPortsInput) -> Global #[derive(Debug, Serialize, Deserialize, Hash)] pub struct InsertPortsFdbInput { - pub actor_id: Uuid, + pub actor_id: util::Id, pub ports: util::serde::HashableMap, } @@ -699,7 +700,7 @@ pub async fn insert_ports_fdb(ctx: &ActivityCtx, input: &InsertPortsFdbInput) -> .map(|(port_name, port, ingress_port_number, protocol)| { let protocol = unwrap!(GameGuardProtocol::from_repr((*protocol).try_into()?)); - Ok(keys::actor::ProxiedPort { + Ok(keys::actor2::ProxiedPort { port_name: port_name.clone(), create_ts, lan_hostname: port.lan_hostname.clone(), @@ -716,7 +717,7 @@ pub async fn insert_ports_fdb(ctx: &ActivityCtx, input: &InsertPortsFdbInput) -> .run(|tx, _mc| { let proxied_ports = proxied_ports.clone(); async move { - let proxied_ports_key = keys::actor::ProxiedPortsKey::new(input.actor_id); + let proxied_ports_key = keys::actor2::ProxiedPortsKey::new(input.actor_id); tx.set( &keys::subspace().pack(&proxied_ports_key), @@ -903,14 +904,27 @@ pub async fn reschedule_actor( ) -> GlobalResult> { tracing::debug!(actor_id=?input.actor_id, "rescheduling actor"); - ctx.activity(ClearPortsAndResourcesInput { - actor_id: input.actor_id, - image_id, - runner_id: state.runner_id, - client_id: state.client_id, - client_workflow_id: state.client_workflow_id, - }) - .await?; + let res = ctx + .activity(ClearPortsAndResourcesInput { + actor_id: input.actor_id, + image_id, + runner_id: state.runner_id, + client_id: state.client_id, + client_workflow_id: state.client_workflow_id, + }) + .await?; + + // `destroy_runner` is true when this was the last actor running on that runner, meaning we have to + // destroy it. 
+ if res.destroy_runner { + ctx.signal(protocol::Command::SignalRunner { + runner_id: state.runner_id, + signal: Signal::SIGKILL as i32, + }) + .to_workflow_id(state.client_workflow_id) + .send() + .await?; + } let actor_setup = setup::setup(ctx, &input, setup::SetupCtx::Reschedule { image_id }).await?; @@ -989,18 +1003,23 @@ struct RescheduleState { #[derive(Debug, Serialize, Deserialize, Hash)] struct ClearPortsAndResourcesInput { - actor_id: Uuid, + actor_id: util::Id, image_id: Uuid, runner_id: Uuid, client_id: Uuid, client_workflow_id: Uuid, } +#[derive(Debug, Serialize, Deserialize, Hash)] +pub struct ClearPortsAndResourcesOutput { + destroy_runner: bool, +} + #[activity(ClearPortsAndResources)] async fn clear_ports_and_resources( ctx: &ActivityCtx, input: &ClearPortsAndResourcesInput, -) -> GlobalResult<()> { +) -> GlobalResult { let pool = &ctx.sqlite().await?; let ( @@ -1036,7 +1055,8 @@ async fn clear_ports_and_resources( )?; let build = unwrap_with!(build_res.builds.first(), BUILD_NOT_FOUND); - ctx.fdb() + let destroy_runner = ctx + .fdb() .await? 
.run(|tx, _mc| { let ingress_ports = ingress_ports.clone(); @@ -1059,7 +1079,7 @@ async fn clear_ports_and_resources( .custom_instrument(tracing::info_span!("actor_clear_ports_and_resources_tx")) .await?; - Ok(()) + Ok(ClearPortsAndResourcesOutput { destroy_runner }) } #[derive(Debug, Serialize, Deserialize, Hash)] diff --git a/packages/edge/services/pegboard/src/workflows/actor2/setup.rs b/packages/edge/services/pegboard/src/workflows/actor2/setup.rs index b1fb0173c4..dc9d992d8a 100644 --- a/packages/edge/services/pegboard/src/workflows/actor2/setup.rs +++ b/packages/edge/services/pegboard/src/workflows/actor2/setup.rs @@ -282,7 +282,7 @@ pub async fn disable_tls_ports( #[derive(Debug, Clone, Serialize, Deserialize, Hash)] struct InsertDbInput { - actor_id: Uuid, + actor_id: util::Id, env_id: Uuid, tags: util::serde::HashableMap, resources: Option, @@ -432,7 +432,7 @@ async fn insert_db(ctx: &ActivityCtx, input: &InsertDbInput) -> GlobalResult, create_ts: i64, @@ -443,7 +443,7 @@ async fn insert_fdb(ctx: &ActivityCtx, input: &InsertFdbInput) -> GlobalResult<( ctx.fdb() .await? 
.run(|tx, _mc| async move { - let create_ts_key = keys::actor::CreateTsKey::new(input.actor_id); + let create_ts_key = keys::actor2::CreateTsKey::new(input.actor_id); tx.set( &keys::subspace().pack(&create_ts_key), &create_ts_key @@ -451,7 +451,7 @@ async fn insert_fdb(ctx: &ActivityCtx, input: &InsertFdbInput) -> GlobalResult<( .map_err(|x| fdb::FdbBindingError::CustomError(x.into()))?, ); - let workflow_id_key = keys::actor::WorkflowIdKey::new(input.actor_id); + let workflow_id_key = keys::actor2::WorkflowIdKey::new(input.actor_id); tx.set( &keys::subspace().pack(&workflow_id_key), &workflow_id_key @@ -461,8 +461,8 @@ async fn insert_fdb(ctx: &ActivityCtx, input: &InsertFdbInput) -> GlobalResult<( // Add env index key let env_actor_key = - keys::env::ActorKey::new(input.env_id, input.create_ts, input.actor_id); - let data = keys::env::ActorKeyData { + keys::env::Actor2Key::new(input.env_id, input.create_ts, input.actor_id); + let data = keys::env::Actor2KeyData { is_destroyed: false, tags: input.tags.clone().into_iter().collect(), }; diff --git a/packages/toolchain/cli/src/commands/actor/logs.rs b/packages/toolchain/cli/src/commands/actor/logs.rs index 662df560f1..6d51bb2715 100644 --- a/packages/toolchain/cli/src/commands/actor/logs.rs +++ b/packages/toolchain/cli/src/commands/actor/logs.rs @@ -1,7 +1,6 @@ use anyhow::*; use clap::Parser; use toolchain::errors; -use uuid::Uuid; /// Stream logs from a specific actor #[derive(Parser)] @@ -33,9 +32,6 @@ impl Opts { let env = crate::util::env::get_or_select(&ctx, self.environment.as_ref()).await?; - let actor_id = - Uuid::parse_str(&self.id).map_err(|_| errors::UserError::new("invalid id uuid"))?; - let print_type = if self.no_timestamps { toolchain::util::actor::logs::PrintType::Print } else { @@ -45,7 +41,7 @@ impl Opts { &ctx, toolchain::util::actor::logs::TailOpts { environment: &env, - actor_id, + actor_id: self.id.clone(), stream: self .stream .clone() diff --git 
a/packages/toolchain/toolchain/src/tasks/build_publish/js.rs b/packages/toolchain/toolchain/src/tasks/build_publish/js.rs index 282e8aae81..6b49babd7e 100644 --- a/packages/toolchain/toolchain/src/tasks/build_publish/js.rs +++ b/packages/toolchain/toolchain/src/tasks/build_publish/js.rs @@ -204,10 +204,10 @@ async fn upload_bundle( build_archive.append_dir_all(".", &push_opts.build_path)?; let build_tar_file = build_archive.into_inner()?; - let build_kind = models::BuildsBuildKind::Javascript; + let build_kind = models::BuildsKind::Javascript; let build_compression = match push_opts.compression { - config::build::Compression::None => models::BuildsBuildCompression::None, - config::build::Compression::Lz4 => models::BuildsBuildCompression::Lz4, + config::build::Compression::None => models::BuildsCompression::None, + config::build::Compression::Lz4 => models::BuildsCompression::Lz4, }; // Compress build @@ -238,6 +238,7 @@ async fn upload_bundle( kind: Some(build_kind), compression: Some(build_compression), allocation: None, + resources: None, }, Some(&ctx.project.name_id), Some(&push_opts.env.slug), diff --git a/packages/toolchain/toolchain/src/tasks/deploy/mod.rs b/packages/toolchain/toolchain/src/tasks/deploy/mod.rs index d1b3074602..366f6113b3 100644 --- a/packages/toolchain/toolchain/src/tasks/deploy/mod.rs +++ b/packages/toolchain/toolchain/src/tasks/deploy/mod.rs @@ -218,7 +218,7 @@ async fn create_edge_function_actors( for actor in &actors_res.actors { existing_regions.insert(actor.region.clone()); - existing_actors.insert(actor.region.clone(), actor.id); + existing_actors.insert(actor.region.clone(), actor.id.clone()); } // Create or upgrade actors for each region diff --git a/packages/toolchain/toolchain/src/util/actor/logs.rs b/packages/toolchain/toolchain/src/util/actor/logs.rs index 53d22e49be..271aac167d 100644 --- a/packages/toolchain/toolchain/src/util/actor/logs.rs +++ b/packages/toolchain/toolchain/src/util/actor/logs.rs @@ -32,7 +32,7 @@ pub enum 
PrintType { pub struct TailOpts<'a> { pub print_type: PrintType, pub environment: &'a str, - pub actor_id: Uuid, + pub actor_id: String, pub stream: LogStream, pub follow: bool, pub exit_on_ctrl_c: bool, @@ -198,7 +198,7 @@ async fn poll_actor_state( let res = apis::actors_api::actors_get( &ctx.openapi_config_cloud, - &opts.actor_id.to_string(), + &opts.actor_id, Some(&ctx.project.name_id), Some(opts.environment), None, diff --git a/packages/toolchain/toolchain/src/util/build.rs b/packages/toolchain/toolchain/src/util/build.rs index 20f4fb765c..7121a25666 100644 --- a/packages/toolchain/toolchain/src/util/build.rs +++ b/packages/toolchain/toolchain/src/util/build.rs @@ -1,20 +1,20 @@ use anyhow::*; -use rivet_api::models::{BuildsBuildCompression, BuildsBuildKind}; +use rivet_api::models::{BuildsCompression, BuildsKind}; use std::path::Path; use crate::{config, util::lz4}; /// Generates the file name that holds the build tar. -pub fn file_name(kind: BuildsBuildKind, compression: BuildsBuildCompression) -> String { +pub fn file_name(kind: BuildsKind, compression: BuildsCompression) -> String { let file_name = match kind { - BuildsBuildKind::DockerImage => "image", - BuildsBuildKind::OciBundle => "oci-bundle", - BuildsBuildKind::Javascript => "js-bundle", + BuildsKind::DockerImage => "image", + BuildsKind::OciBundle => "oci-bundle", + BuildsKind::Javascript => "js-bundle", }; let file_ext = "tar"; let file_ext_compression = match compression { - BuildsBuildCompression::None => "", - BuildsBuildCompression::Lz4 => ".lz4", + BuildsCompression::None => "", + BuildsCompression::Lz4 => ".lz4", }; format!("{file_name}.{file_ext}{file_ext_compression}") } diff --git a/packages/toolchain/toolchain/src/util/docker/push.rs b/packages/toolchain/toolchain/src/util/docker/push.rs index d317cf25dc..176a0e4955 100644 --- a/packages/toolchain/toolchain/src/util/docker/push.rs +++ b/packages/toolchain/toolchain/src/util/docker/push.rs @@ -53,13 +53,13 @@ pub async fn push_tar( )); let 
build_kind = match push_opts.bundle { - config::build::docker::BundleKind::DockerImage => models::BuildsBuildKind::DockerImage, - config::build::docker::BundleKind::OciBundle => models::BuildsBuildKind::OciBundle, + config::build::docker::BundleKind::DockerImage => models::BuildsKind::DockerImage, + config::build::docker::BundleKind::OciBundle => models::BuildsKind::OciBundle, }; let build_compression = match push_opts.compression { - config::build::Compression::None => models::BuildsBuildCompression::None, - config::build::Compression::Lz4 => models::BuildsBuildCompression::Lz4, + config::build::Compression::None => models::BuildsCompression::None, + config::build::Compression::Lz4 => models::BuildsCompression::Lz4, }; let build_res = apis::builds_api::builds_prepare( @@ -74,12 +74,13 @@ pub async fn push_tar( kind: Some(build_kind), compression: Some(build_compression), // TODO: Expose to CLI and config - allocation: Some(Box::new(models::BuildsBuildAllocation { - // single: Some(serde_json::json!({})), - // multi: None, - single: None, - multi: Some(Box::new(models::BuildsBuildAllocationMulti { slots: 4 })), + allocation: Some(Box::new(models::BuildsAllocation { + single: Some(serde_json::json!({})), + multi: None, + // single: None, + // multi: Some(Box::new(models::BuildsAllocationMulti { slots: 4 })), })), + resources: None, }, Some(&ctx.project.name_id), Some(&push_opts.env.slug), diff --git a/sdks/api/fern/definition/actors/__package__.yml b/sdks/api/fern/definition/actors/__package__.yml index 38cefce292..7aa261f32f 100644 --- a/sdks/api/fern/definition/actors/__package__.yml +++ b/sdks/api/fern/definition/actors/__package__.yml @@ -13,11 +13,11 @@ service: get: path: /{actor} method: GET - docs: Gets a dynamic actor. + docs: Gets a actor. 
path-parameters: actor: docs: The id of the actor to destroy - type: uuid + type: commons.Id request: name: ListActorsRequestQuery query-parameters: @@ -46,7 +46,7 @@ service: create: path: "" method: POST - docs: Create a new dynamic actor. + docs: Create a new actor. request: name: CreateActorRequestQuery body: CreateActorRequest @@ -59,11 +59,11 @@ service: destroy: path: /{actor} method: DELETE - docs: Destroy a dynamic actor. + docs: Destroy a actor. path-parameters: actor: docs: The id of the actor to destroy - type: uuid + type: commons.Id request: name: DestroyActorRequestQuery query-parameters: @@ -80,11 +80,11 @@ service: upgrade: path: /{actor}/upgrade method: POST - docs: Upgrades a dynamic actor. + docs: Upgrades a actor. path-parameters: actor: docs: The id of the actor to upgrade - type: uuid + type: commons.Id request: name: UpgradeActorRequestQuery query-parameters: @@ -96,7 +96,7 @@ service: upgradeAll: path: /upgrade method: POST - docs: Upgrades a dynamic actor. + docs: Upgrades all actors matching the given tags. request: name: UpgradeAllActorsRequestQuery query-parameters: diff --git a/sdks/api/fern/definition/actors/common.yml b/sdks/api/fern/definition/actors/common.yml index c238888b8b..6a62bc8c57 100644 --- a/sdks/api/fern/definition/actors/common.yml +++ b/sdks/api/fern/definition/actors/common.yml @@ -6,7 +6,7 @@ imports: types: Actor: properties: - id: uuid + id: commons.Id region: string tags: unknown runtime: Runtime diff --git a/sdks/api/fern/definition/actors/logs.yml b/sdks/api/fern/definition/actors/logs.yml index 63ac7e1b81..c01f5865c5 100644 --- a/sdks/api/fern/definition/actors/logs.yml +++ b/sdks/api/fern/definition/actors/logs.yml @@ -48,7 +48,7 @@ types: properties: actor_ids: docs: List of actor IDs in these logs. The order of these correspond to the index in the log entry. - type: list + type: list lines: docs: Sorted old to new. 
type: list diff --git a/sdks/api/fern/definition/common.yml b/sdks/api/fern/definition/common.yml index 9481d860de..d4417d7f6c 100644 --- a/sdks/api/fern/definition/common.yml +++ b/sdks/api/fern/definition/common.yml @@ -38,6 +38,10 @@ errors: type: ErrorBody types: + Id: + type: string + docs: Can be a UUID or base36 encoded binary data. + Identifier: type: string docs: A human readable short identifier used to references resources. Different diff --git a/sdks/api/full/go/actors/client/client.go b/sdks/api/full/go/actors/client/client.go index 91f11675b1..e50e8077b7 100644 --- a/sdks/api/full/go/actors/client/client.go +++ b/sdks/api/full/go/actors/client/client.go @@ -8,7 +8,6 @@ import ( json "encoding/json" errors "errors" fmt "fmt" - uuid "github.com/google/uuid" io "io" http "net/http" url "net/url" @@ -42,10 +41,10 @@ func NewClient(opts ...core.ClientOption) *Client { } } -// Gets a dynamic actor. +// Gets a actor. // // The id of the actor to destroy -func (c *Client) Get(ctx context.Context, actor uuid.UUID, request *actors.ListActorsRequestQuery) (*actors.GetActorResponse, error) { +func (c *Client) Get(ctx context.Context, actor sdk.Id, request *actors.ListActorsRequestQuery) (*actors.GetActorResponse, error) { baseURL := "https://api.rivet.gg" if c.baseURL != "" { baseURL = c.baseURL @@ -237,7 +236,7 @@ func (c *Client) List(ctx context.Context, request *actors.GetActorsRequestQuery return response, nil } -// Create a new dynamic actor. +// Create a new actor. func (c *Client) Create(ctx context.Context, request *actors.CreateActorRequestQuery) (*actors.CreateActorResponse, error) { baseURL := "https://api.rivet.gg" if c.baseURL != "" { @@ -330,10 +329,10 @@ func (c *Client) Create(ctx context.Context, request *actors.CreateActorRequestQ return response, nil } -// Destroy a dynamic actor. +// Destroy a actor. 
// // The id of the actor to destroy -func (c *Client) Destroy(ctx context.Context, actor uuid.UUID, request *actors.DestroyActorRequestQuery) (*actors.DestroyActorResponse, error) { +func (c *Client) Destroy(ctx context.Context, actor sdk.Id, request *actors.DestroyActorRequestQuery) (*actors.DestroyActorResponse, error) { baseURL := "https://api.rivet.gg" if c.baseURL != "" { baseURL = c.baseURL @@ -424,10 +423,10 @@ func (c *Client) Destroy(ctx context.Context, actor uuid.UUID, request *actors.D return response, nil } -// Upgrades a dynamic actor. +// Upgrades a actor. // // The id of the actor to upgrade -func (c *Client) Upgrade(ctx context.Context, actor uuid.UUID, request *actors.UpgradeActorRequestQuery) (*actors.UpgradeActorResponse, error) { +func (c *Client) Upgrade(ctx context.Context, actor sdk.Id, request *actors.UpgradeActorRequestQuery) (*actors.UpgradeActorResponse, error) { baseURL := "https://api.rivet.gg" if c.baseURL != "" { baseURL = c.baseURL @@ -516,7 +515,7 @@ func (c *Client) Upgrade(ctx context.Context, actor uuid.UUID, request *actors.U return response, nil } -// Upgrades a dynamic actor. +// Upgrades all actors matching the given tags. 
func (c *Client) UpgradeAll(ctx context.Context, request *actors.UpgradeAllActorsRequestQuery) (*actors.UpgradeAllActorsResponse, error) { baseURL := "https://api.rivet.gg" if c.baseURL != "" { diff --git a/sdks/api/full/go/actors/types.go b/sdks/api/full/go/actors/types.go index 5eb16c1e3f..c6795304b1 100644 --- a/sdks/api/full/go/actors/types.go +++ b/sdks/api/full/go/actors/types.go @@ -62,7 +62,7 @@ type QueryActorsRequestQuery struct { } type Actor struct { - Id uuid.UUID `json:"id"` + Id sdk.Id `json:"id"` Region string `json:"region"` Tags interface{} `json:"tags,omitempty"` Runtime *Runtime `json:"runtime,omitempty"` diff --git a/sdks/api/full/go/types.go b/sdks/api/full/go/types.go index f9ff8ecb20..f89ecce9d2 100644 --- a/sdks/api/full/go/types.go +++ b/sdks/api/full/go/types.go @@ -115,6 +115,9 @@ func (g *GlobalEventNotification) String() string { return fmt.Sprintf("%#v", g) } +// Can be a UUID or base36 encoded binary data. +type Id = string + // A human readable short identifier used to references resources. Different than a `uuid` because this is intended to be human readable. Different than `DisplayName` because this should not include special characters and be short. type Identifier = string diff --git a/sdks/api/full/openapi/openapi.yml b/sdks/api/full/openapi/openapi.yml index 9c730ed011..7c62e11bda 100644 --- a/sdks/api/full/openapi/openapi.yml +++ b/sdks/api/full/openapi/openapi.yml @@ -5,7 +5,7 @@ info: paths: /actors/{actor}: get: - description: Gets a dynamic actor. + description: Gets a actor. operationId: actors_get tags: - Actors @@ -15,8 +15,7 @@ paths: description: The id of the actor to destroy required: true schema: - type: string - format: uuid + $ref: '#/components/schemas/Id' - name: project in: query required: false @@ -78,7 +77,7 @@ paths: security: &ref_0 - BearerAuth: [] delete: - description: Destroy a dynamic actor. + description: Destroy a actor. 
operationId: actors_destroy tags: - Actors @@ -88,8 +87,7 @@ paths: description: The id of the actor to destroy required: true schema: - type: string - format: uuid + $ref: '#/components/schemas/Id' - name: project in: query required: false @@ -238,7 +236,7 @@ paths: $ref: '#/components/schemas/ErrorBody' security: *ref_0 post: - description: Create a new dynamic actor. + description: Create a new actor. operationId: actors_create tags: - Actors @@ -310,7 +308,7 @@ paths: $ref: '#/components/schemas/ActorsCreateActorRequest' /actors/{actor}/upgrade: post: - description: Upgrades a dynamic actor. + description: Upgrades a actor. operationId: actors_upgrade tags: - Actors @@ -320,8 +318,7 @@ paths: description: The id of the actor to upgrade required: true schema: - type: string - format: uuid + $ref: '#/components/schemas/Id' - name: project in: query required: false @@ -384,7 +381,7 @@ paths: $ref: '#/components/schemas/ActorsUpgradeActorRequest' /actors/upgrade: post: - description: Upgrades a dynamic actor. + description: Upgrades all actors matching the given tags. operationId: actors_upgradeAll tags: - Actors @@ -11007,8 +11004,7 @@ components: type: object properties: id: - type: string - format: uuid + $ref: '#/components/schemas/Id' region: type: string tags: {} @@ -13153,6 +13149,9 @@ components: required: - min_idle_lobbies - max_idle_lobbies + Id: + type: string + description: Can be a UUID or base36 encoded binary data. Identifier: type: string description: >- diff --git a/sdks/api/full/openapi_compat/openapi.yml b/sdks/api/full/openapi_compat/openapi.yml index 6e70ec3a2f..2b3a075f09 100644 --- a/sdks/api/full/openapi_compat/openapi.yml +++ b/sdks/api/full/openapi_compat/openapi.yml @@ -5,7 +5,7 @@ info: paths: '/actors/{actor}': get: - description: Gets a dynamic actor. + description: Gets a actor. 
operationId: actors_get tags: - Actors @@ -15,8 +15,7 @@ paths: description: The id of the actor to destroy required: true schema: - type: string - format: uuid + $ref: '#/components/schemas/Id' - name: project in: query required: false @@ -78,7 +77,7 @@ paths: security: &ref_0 - BearerAuth: [] delete: - description: Destroy a dynamic actor. + description: Destroy a actor. operationId: actors_destroy tags: - Actors @@ -88,8 +87,7 @@ paths: description: The id of the actor to destroy required: true schema: - type: string - format: uuid + $ref: '#/components/schemas/Id' - name: project in: query required: false @@ -238,7 +236,7 @@ paths: $ref: '#/components/schemas/ErrorBody' security: *ref_0 post: - description: Create a new dynamic actor. + description: Create a new actor. operationId: actors_create tags: - Actors @@ -310,7 +308,7 @@ paths: $ref: '#/components/schemas/ActorsCreateActorRequest' '/actors/{actor}/upgrade': post: - description: Upgrades a dynamic actor. + description: Upgrades a actor. operationId: actors_upgrade tags: - Actors @@ -320,8 +318,7 @@ paths: description: The id of the actor to upgrade required: true schema: - type: string - format: uuid + $ref: '#/components/schemas/Id' - name: project in: query required: false @@ -384,7 +381,7 @@ paths: $ref: '#/components/schemas/ActorsUpgradeActorRequest' /actors/upgrade: post: - description: Upgrades a dynamic actor. + description: Upgrades all actors matching the given tags. operationId: actors_upgradeAll tags: - Actors @@ -11007,8 +11004,7 @@ components: type: object properties: id: - type: string - format: uuid + $ref: '#/components/schemas/Id' region: type: string tags: {} @@ -13153,6 +13149,9 @@ components: required: - min_idle_lobbies - max_idle_lobbies + Id: + type: string + description: Can be a UUID or base36 encoded binary data. 
Identifier: type: string description: >- diff --git a/sdks/api/full/rust/docs/ActorsActor.md b/sdks/api/full/rust/docs/ActorsActor.md index 597353081f..2e0a4dccfe 100644 --- a/sdks/api/full/rust/docs/ActorsActor.md +++ b/sdks/api/full/rust/docs/ActorsActor.md @@ -4,7 +4,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**id** | [**uuid::Uuid**](uuid::Uuid.md) | | +**id** | **String** | Can be a UUID or base36 encoded binary data. | **region** | **String** | | **tags** | Option<[**serde_json::Value**](.md)> | | **runtime** | [**crate::models::ActorsRuntime**](ActorsRuntime.md) | | diff --git a/sdks/api/full/rust/docs/ActorsApi.md b/sdks/api/full/rust/docs/ActorsApi.md index b81a5ce83d..2166a34a6f 100644 --- a/sdks/api/full/rust/docs/ActorsApi.md +++ b/sdks/api/full/rust/docs/ActorsApi.md @@ -20,7 +20,7 @@ Method | HTTP request | Description > crate::models::ActorsCreateActorResponse actors_create(actors_create_actor_request, project, environment, endpoint_type) -Create a new dynamic actor. +Create a new actor. ### Parameters @@ -53,14 +53,14 @@ Name | Type | Description | Required | Notes > serde_json::Value actors_destroy(actor, project, environment, override_kill_timeout) -Destroy a dynamic actor. +Destroy a actor. ### Parameters Name | Type | Description | Required | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**actor** | **uuid::Uuid** | The id of the actor to destroy | [required] | +**actor** | **String** | The id of the actor to destroy | [required] | **project** | Option<**String**> | | | **environment** | Option<**String**> | | | **override_kill_timeout** | Option<**i64**> | The duration to wait for in milliseconds before killing the actor. This should be used to override the default kill timeout if a faster time is needed, say for ignoring a graceful shutdown. 
| | @@ -86,14 +86,14 @@ Name | Type | Description | Required | Notes > crate::models::ActorsGetActorResponse actors_get(actor, project, environment, endpoint_type) -Gets a dynamic actor. +Gets a actor. ### Parameters Name | Type | Description | Required | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**actor** | **uuid::Uuid** | The id of the actor to destroy | [required] | +**actor** | **String** | The id of the actor to destroy | [required] | **project** | Option<**String**> | | | **environment** | Option<**String**> | | | **endpoint_type** | Option<[**ActorsEndpointType**](.md)> | | | @@ -187,14 +187,14 @@ Name | Type | Description | Required | Notes > serde_json::Value actors_upgrade(actor, actors_upgrade_actor_request, project, environment) -Upgrades a dynamic actor. +Upgrades a actor. ### Parameters Name | Type | Description | Required | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**actor** | **uuid::Uuid** | The id of the actor to upgrade | [required] | +**actor** | **String** | The id of the actor to upgrade | [required] | **actors_upgrade_actor_request** | [**ActorsUpgradeActorRequest**](ActorsUpgradeActorRequest.md) | | [required] | **project** | Option<**String**> | | | **environment** | Option<**String**> | | | @@ -220,7 +220,7 @@ Name | Type | Description | Required | Notes > crate::models::ActorsUpgradeAllActorsResponse actors_upgrade_all(actors_upgrade_all_actors_request, project, environment) -Upgrades a dynamic actor. +Upgrades all actors matching the given tags. ### Parameters diff --git a/sdks/api/full/rust/src/apis/actors_api.rs b/sdks/api/full/rust/src/apis/actors_api.rs index 74213e7230..bf00b1b401 100644 --- a/sdks/api/full/rust/src/apis/actors_api.rs +++ b/sdks/api/full/rust/src/apis/actors_api.rs @@ -174,7 +174,7 @@ pub async fn actors_create( } } -/// Destroy a dynamic actor. +/// Destroy a actor. 
pub async fn actors_destroy( configuration: &configuration::Configuration, actor: &str, @@ -234,7 +234,7 @@ pub async fn actors_destroy( } } -/// Gets a dynamic actor. +/// Gets a actor. pub async fn actors_get( configuration: &configuration::Configuration, actor: &str, @@ -481,7 +481,7 @@ pub async fn actors_upgrade( } } -/// Upgrades a dynamic actor. +/// Upgrades all actors matching the given tags. pub async fn actors_upgrade_all( configuration: &configuration::Configuration, actors_upgrade_all_actors_request: crate::models::ActorsUpgradeAllActorsRequest, diff --git a/sdks/api/full/rust/src/models/actors_actor.rs b/sdks/api/full/rust/src/models/actors_actor.rs index 76ebff754e..21168917c1 100644 --- a/sdks/api/full/rust/src/models/actors_actor.rs +++ b/sdks/api/full/rust/src/models/actors_actor.rs @@ -10,8 +10,9 @@ #[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] pub struct ActorsActor { + /// Can be a UUID or base36 encoded binary data. #[serde(rename = "id")] - pub id: uuid::Uuid, + pub id: String, #[serde(rename = "region")] pub region: String, #[serde(rename = "tags", deserialize_with = "Option::deserialize")] @@ -37,7 +38,7 @@ pub struct ActorsActor { impl ActorsActor { pub fn new( - id: uuid::Uuid, + id: String, region: String, tags: Option, runtime: crate::models::ActorsRuntime, diff --git a/sdks/api/full/typescript/src/api/resources/actors/client/Client.ts b/sdks/api/full/typescript/src/api/resources/actors/client/Client.ts index 84529898e9..4f939eef9e 100644 --- a/sdks/api/full/typescript/src/api/resources/actors/client/Client.ts +++ b/sdks/api/full/typescript/src/api/resources/actors/client/Client.ts @@ -51,9 +51,9 @@ export class Actors { } /** - * Gets a dynamic actor. + * Gets a actor. 
* - * @param {string} actor - The id of the actor to destroy + * @param {Rivet.Id} actor - The id of the actor to destroy * @param {Rivet.actors.ListActorsRequestQuery} request * @param {Actors.RequestOptions} requestOptions - Request-specific configuration. * @@ -65,14 +65,14 @@ export class Actors { * @throws {@link Rivet.BadRequestError} * * @example - * await client.actors.get("d5e9c84f-c2b2-4bf4-b4b0-7ffd7a9ffc32", { + * await client.actors.get("string", { * project: "string", * environment: "string", * endpointType: "hostname" * }) */ public async get( - actor: string, + actor: Rivet.Id, request: Rivet.actors.ListActorsRequestQuery = {}, requestOptions?: Actors.RequestOptions, ): Promise { @@ -97,7 +97,7 @@ export class Actors { (await core.Supplier.get(this._options.baseUrl)) ?? (await core.Supplier.get(this._options.environment)) ?? environments.RivetEnvironment.Production, - `/actors/${encodeURIComponent(actor)}`, + `/actors/${encodeURIComponent(serializers.Id.jsonOrThrow(actor))}`, ), method: "GET", headers: { @@ -384,7 +384,7 @@ export class Actors { } /** - * Create a new dynamic actor. + * Create a new actor. * * @param {Rivet.actors.CreateActorRequestQuery} request * @param {Actors.RequestOptions} requestOptions - Request-specific configuration. @@ -572,9 +572,9 @@ export class Actors { } /** - * Destroy a dynamic actor. + * Destroy a actor. * - * @param {string} actor - The id of the actor to destroy + * @param {Rivet.Id} actor - The id of the actor to destroy * @param {Rivet.actors.DestroyActorRequestQuery} request * @param {Actors.RequestOptions} requestOptions - Request-specific configuration. 
* @@ -586,14 +586,14 @@ export class Actors { * @throws {@link Rivet.BadRequestError} * * @example - * await client.actors.destroy("d5e9c84f-c2b2-4bf4-b4b0-7ffd7a9ffc32", { + * await client.actors.destroy("string", { * project: "string", * environment: "string", * overrideKillTimeout: 1000000 * }) */ public async destroy( - actor: string, + actor: Rivet.Id, request: Rivet.actors.DestroyActorRequestQuery = {}, requestOptions?: Actors.RequestOptions, ): Promise { @@ -616,7 +616,7 @@ export class Actors { (await core.Supplier.get(this._options.baseUrl)) ?? (await core.Supplier.get(this._options.environment)) ?? environments.RivetEnvironment.Production, - `/actors/${encodeURIComponent(actor)}`, + `/actors/${encodeURIComponent(serializers.Id.jsonOrThrow(actor))}`, ), method: "DELETE", headers: { @@ -730,9 +730,9 @@ export class Actors { } /** - * Upgrades a dynamic actor. + * Upgrades a actor. * - * @param {string} actor - The id of the actor to upgrade + * @param {Rivet.Id} actor - The id of the actor to upgrade * @param {Rivet.actors.UpgradeActorRequestQuery} request * @param {Actors.RequestOptions} requestOptions - Request-specific configuration. * @@ -744,7 +744,7 @@ export class Actors { * @throws {@link Rivet.BadRequestError} * * @example - * await client.actors.upgrade("d5e9c84f-c2b2-4bf4-b4b0-7ffd7a9ffc32", { + * await client.actors.upgrade("string", { * project: "string", * environment: "string", * body: { @@ -756,7 +756,7 @@ export class Actors { * }) */ public async upgrade( - actor: string, + actor: Rivet.Id, request: Rivet.actors.UpgradeActorRequestQuery, requestOptions?: Actors.RequestOptions, ): Promise { @@ -775,7 +775,7 @@ export class Actors { (await core.Supplier.get(this._options.baseUrl)) ?? (await core.Supplier.get(this._options.environment)) ?? 
environments.RivetEnvironment.Production, - `/actors/${encodeURIComponent(actor)}/upgrade`, + `/actors/${encodeURIComponent(serializers.Id.jsonOrThrow(actor))}/upgrade`, ), method: "POST", headers: { @@ -890,7 +890,7 @@ export class Actors { } /** - * Upgrades a dynamic actor. + * Upgrades all actors matching the given tags. * * @param {Rivet.actors.UpgradeAllActorsRequestQuery} request * @param {Actors.RequestOptions} requestOptions - Request-specific configuration. diff --git a/sdks/api/full/typescript/src/api/resources/actors/resources/common/types/Actor.ts b/sdks/api/full/typescript/src/api/resources/actors/resources/common/types/Actor.ts index 6e33dd8a0f..1aeb34bd9f 100644 --- a/sdks/api/full/typescript/src/api/resources/actors/resources/common/types/Actor.ts +++ b/sdks/api/full/typescript/src/api/resources/actors/resources/common/types/Actor.ts @@ -5,7 +5,7 @@ import * as Rivet from "../../../../../index"; export interface Actor { - id: string; + id: Rivet.Id; region: string; tags?: unknown; runtime: Rivet.actors.Runtime; diff --git a/sdks/api/full/typescript/src/api/resources/common/types/Id.ts b/sdks/api/full/typescript/src/api/resources/common/types/Id.ts new file mode 100644 index 0000000000..e670e70d7a --- /dev/null +++ b/sdks/api/full/typescript/src/api/resources/common/types/Id.ts @@ -0,0 +1,8 @@ +/** + * This file was auto-generated by Fern from our API Definition. + */ + +/** + * Can be a UUID or base36 encoded binary data. 
+ */ +export type Id = string; diff --git a/sdks/api/full/typescript/src/api/resources/common/types/index.ts b/sdks/api/full/typescript/src/api/resources/common/types/index.ts index 758ff287df..4fc6c5df79 100644 --- a/sdks/api/full/typescript/src/api/resources/common/types/index.ts +++ b/sdks/api/full/typescript/src/api/resources/common/types/index.ts @@ -1,3 +1,4 @@ +export * from "./Id"; export * from "./Identifier"; export * from "./Bio"; export * from "./Email"; diff --git a/sdks/api/full/typescript/src/serialization/resources/actors/resources/common/types/Actor.ts b/sdks/api/full/typescript/src/serialization/resources/actors/resources/common/types/Actor.ts index 73be90d58d..aad5b7be1c 100644 --- a/sdks/api/full/typescript/src/serialization/resources/actors/resources/common/types/Actor.ts +++ b/sdks/api/full/typescript/src/serialization/resources/actors/resources/common/types/Actor.ts @@ -5,6 +5,7 @@ import * as serializers from "../../../../../index"; import * as Rivet from "../../../../../../api/index"; import * as core from "../../../../../../core"; +import { Id } from "../../../../common/types/Id"; import { Runtime } from "./Runtime"; import { Network } from "./Network"; import { Resources } from "./Resources"; @@ -13,7 +14,7 @@ import { Timestamp } from "../../../../common/types/Timestamp"; export const Actor: core.serialization.ObjectSchema = core.serialization.object({ - id: core.serialization.string(), + id: Id, region: core.serialization.string(), tags: core.serialization.unknown(), runtime: Runtime, @@ -27,7 +28,7 @@ export const Actor: core.serialization.ObjectSchema = core.serialization.string(); + +export declare namespace Id { + export type Raw = string; +} diff --git a/sdks/api/full/typescript/src/serialization/resources/common/types/index.ts b/sdks/api/full/typescript/src/serialization/resources/common/types/index.ts index 758ff287df..4fc6c5df79 100644 --- a/sdks/api/full/typescript/src/serialization/resources/common/types/index.ts +++ 
b/sdks/api/full/typescript/src/serialization/resources/common/types/index.ts @@ -1,3 +1,4 @@ +export * from "./Id"; export * from "./Identifier"; export * from "./Bio"; export * from "./Email"; diff --git a/sdks/api/runtime/go/actors/client/client.go b/sdks/api/runtime/go/actors/client/client.go index 6d18faaac6..dadb1e321f 100644 --- a/sdks/api/runtime/go/actors/client/client.go +++ b/sdks/api/runtime/go/actors/client/client.go @@ -8,7 +8,6 @@ import ( json "encoding/json" errors "errors" fmt "fmt" - uuid "github.com/google/uuid" io "io" http "net/http" url "net/url" @@ -39,10 +38,10 @@ func NewClient(opts ...core.ClientOption) *Client { } } -// Gets a dynamic actor. +// Gets a actor. // // The id of the actor to destroy -func (c *Client) Get(ctx context.Context, actor uuid.UUID, request *actors.ListActorsRequestQuery) (*actors.GetActorResponse, error) { +func (c *Client) Get(ctx context.Context, actor sdk.Id, request *actors.ListActorsRequestQuery) (*actors.GetActorResponse, error) { baseURL := "https://api.rivet.gg" if c.baseURL != "" { baseURL = c.baseURL @@ -234,7 +233,7 @@ func (c *Client) List(ctx context.Context, request *actors.GetActorsRequestQuery return response, nil } -// Create a new dynamic actor. +// Create a new actor. func (c *Client) Create(ctx context.Context, request *actors.CreateActorRequestQuery) (*actors.CreateActorResponse, error) { baseURL := "https://api.rivet.gg" if c.baseURL != "" { @@ -327,10 +326,10 @@ func (c *Client) Create(ctx context.Context, request *actors.CreateActorRequestQ return response, nil } -// Destroy a dynamic actor. +// Destroy a actor. 
// // The id of the actor to destroy -func (c *Client) Destroy(ctx context.Context, actor uuid.UUID, request *actors.DestroyActorRequestQuery) (*actors.DestroyActorResponse, error) { +func (c *Client) Destroy(ctx context.Context, actor sdk.Id, request *actors.DestroyActorRequestQuery) (*actors.DestroyActorResponse, error) { baseURL := "https://api.rivet.gg" if c.baseURL != "" { baseURL = c.baseURL @@ -421,10 +420,10 @@ func (c *Client) Destroy(ctx context.Context, actor uuid.UUID, request *actors.D return response, nil } -// Upgrades a dynamic actor. +// Upgrades a actor. // // The id of the actor to upgrade -func (c *Client) Upgrade(ctx context.Context, actor uuid.UUID, request *actors.UpgradeActorRequestQuery) (*actors.UpgradeActorResponse, error) { +func (c *Client) Upgrade(ctx context.Context, actor sdk.Id, request *actors.UpgradeActorRequestQuery) (*actors.UpgradeActorResponse, error) { baseURL := "https://api.rivet.gg" if c.baseURL != "" { baseURL = c.baseURL @@ -513,7 +512,7 @@ func (c *Client) Upgrade(ctx context.Context, actor uuid.UUID, request *actors.U return response, nil } -// Upgrades a dynamic actor. +// Upgrades all actors matching the given tags. 
func (c *Client) UpgradeAll(ctx context.Context, request *actors.UpgradeAllActorsRequestQuery) (*actors.UpgradeAllActorsResponse, error) { baseURL := "https://api.rivet.gg" if c.baseURL != "" { diff --git a/sdks/api/runtime/go/actors/types.go b/sdks/api/runtime/go/actors/types.go index 5eb16c1e3f..c6795304b1 100644 --- a/sdks/api/runtime/go/actors/types.go +++ b/sdks/api/runtime/go/actors/types.go @@ -62,7 +62,7 @@ type QueryActorsRequestQuery struct { } type Actor struct { - Id uuid.UUID `json:"id"` + Id sdk.Id `json:"id"` Region string `json:"region"` Tags interface{} `json:"tags,omitempty"` Runtime *Runtime `json:"runtime,omitempty"` diff --git a/sdks/api/runtime/go/types.go b/sdks/api/runtime/go/types.go index f9ad9a27aa..731df8e90c 100644 --- a/sdks/api/runtime/go/types.go +++ b/sdks/api/runtime/go/types.go @@ -45,6 +45,9 @@ func (e *ErrorBody) String() string { // Unstructured metadata relating to an error. Must be manually parsed. type ErrorMetadata = interface{} +// Can be a UUID or base36 encoded binary data. +type Id = string + type Pagination struct { Cursor *string `json:"cursor,omitempty"` diff --git a/sdks/api/runtime/openapi/openapi.yml b/sdks/api/runtime/openapi/openapi.yml index 838cc2a4a2..67ce8ecb9a 100644 --- a/sdks/api/runtime/openapi/openapi.yml +++ b/sdks/api/runtime/openapi/openapi.yml @@ -5,7 +5,7 @@ info: paths: /actors/{actor}: get: - description: Gets a dynamic actor. + description: Gets a actor. operationId: actors_get tags: - Actors @@ -15,8 +15,7 @@ paths: description: The id of the actor to destroy required: true schema: - type: string - format: uuid + $ref: '#/components/schemas/Id' - name: project in: query required: false @@ -78,7 +77,7 @@ paths: security: &ref_0 - BearerAuth: [] delete: - description: Destroy a dynamic actor. + description: Destroy a actor. 
operationId: actors_destroy tags: - Actors @@ -88,8 +87,7 @@ paths: description: The id of the actor to destroy required: true schema: - type: string - format: uuid + $ref: '#/components/schemas/Id' - name: project in: query required: false @@ -238,7 +236,7 @@ paths: $ref: '#/components/schemas/ErrorBody' security: *ref_0 post: - description: Create a new dynamic actor. + description: Create a new actor. operationId: actors_create tags: - Actors @@ -310,7 +308,7 @@ paths: $ref: '#/components/schemas/ActorsCreateActorRequest' /actors/{actor}/upgrade: post: - description: Upgrades a dynamic actor. + description: Upgrades a actor. operationId: actors_upgrade tags: - Actors @@ -320,8 +318,7 @@ paths: description: The id of the actor to upgrade required: true schema: - type: string - format: uuid + $ref: '#/components/schemas/Id' - name: project in: query required: false @@ -384,7 +381,7 @@ paths: $ref: '#/components/schemas/ActorsUpgradeActorRequest' /actors/upgrade: post: - description: Upgrades a dynamic actor. + description: Upgrades all actors matching the given tags. operationId: actors_upgradeAll tags: - Actors @@ -1833,8 +1830,7 @@ components: type: object properties: id: - type: string - format: uuid + $ref: '#/components/schemas/Id' region: type: string tags: {} @@ -2104,6 +2100,9 @@ components: required: - cpu - memory + Id: + type: string + description: Can be a UUID or base36 encoded binary data. WatchResponse: type: object description: Provided by watchable endpoints used in blocking loops. diff --git a/sdks/api/runtime/openapi_compat/openapi.yml b/sdks/api/runtime/openapi_compat/openapi.yml index c15006399d..269ce1068c 100644 --- a/sdks/api/runtime/openapi_compat/openapi.yml +++ b/sdks/api/runtime/openapi_compat/openapi.yml @@ -5,7 +5,7 @@ info: paths: '/actors/{actor}': get: - description: Gets a dynamic actor. + description: Gets a actor. 
operationId: actors_get tags: - Actors @@ -15,8 +15,7 @@ paths: description: The id of the actor to destroy required: true schema: - type: string - format: uuid + $ref: '#/components/schemas/Id' - name: project in: query required: false @@ -78,7 +77,7 @@ paths: security: &ref_0 - BearerAuth: [] delete: - description: Destroy a dynamic actor. + description: Destroy a actor. operationId: actors_destroy tags: - Actors @@ -88,8 +87,7 @@ paths: description: The id of the actor to destroy required: true schema: - type: string - format: uuid + $ref: '#/components/schemas/Id' - name: project in: query required: false @@ -238,7 +236,7 @@ paths: $ref: '#/components/schemas/ErrorBody' security: *ref_0 post: - description: Create a new dynamic actor. + description: Create a new actor. operationId: actors_create tags: - Actors @@ -310,7 +308,7 @@ paths: $ref: '#/components/schemas/ActorsCreateActorRequest' '/actors/{actor}/upgrade': post: - description: Upgrades a dynamic actor. + description: Upgrades a actor. operationId: actors_upgrade tags: - Actors @@ -320,8 +318,7 @@ paths: description: The id of the actor to upgrade required: true schema: - type: string - format: uuid + $ref: '#/components/schemas/Id' - name: project in: query required: false @@ -384,7 +381,7 @@ paths: $ref: '#/components/schemas/ActorsUpgradeActorRequest' /actors/upgrade: post: - description: Upgrades a dynamic actor. + description: Upgrades all actors matching the given tags. operationId: actors_upgradeAll tags: - Actors @@ -1833,8 +1830,7 @@ components: type: object properties: id: - type: string - format: uuid + $ref: '#/components/schemas/Id' region: type: string tags: {} @@ -2104,6 +2100,9 @@ components: required: - cpu - memory + Id: + type: string + description: Can be a UUID or base36 encoded binary data. WatchResponse: type: object description: Provided by watchable endpoints used in blocking loops. 
diff --git a/sdks/api/runtime/rust/docs/ActorsActor.md b/sdks/api/runtime/rust/docs/ActorsActor.md index 597353081f..2e0a4dccfe 100644 --- a/sdks/api/runtime/rust/docs/ActorsActor.md +++ b/sdks/api/runtime/rust/docs/ActorsActor.md @@ -4,7 +4,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**id** | [**uuid::Uuid**](uuid::Uuid.md) | | +**id** | **String** | Can be a UUID or base36 encoded binary data. | **region** | **String** | | **tags** | Option<[**serde_json::Value**](.md)> | | **runtime** | [**crate::models::ActorsRuntime**](ActorsRuntime.md) | | diff --git a/sdks/api/runtime/rust/docs/ActorsApi.md b/sdks/api/runtime/rust/docs/ActorsApi.md index b81a5ce83d..2166a34a6f 100644 --- a/sdks/api/runtime/rust/docs/ActorsApi.md +++ b/sdks/api/runtime/rust/docs/ActorsApi.md @@ -20,7 +20,7 @@ Method | HTTP request | Description > crate::models::ActorsCreateActorResponse actors_create(actors_create_actor_request, project, environment, endpoint_type) -Create a new dynamic actor. +Create a new actor. ### Parameters @@ -53,14 +53,14 @@ Name | Type | Description | Required | Notes > serde_json::Value actors_destroy(actor, project, environment, override_kill_timeout) -Destroy a dynamic actor. +Destroy a actor. ### Parameters Name | Type | Description | Required | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**actor** | **uuid::Uuid** | The id of the actor to destroy | [required] | +**actor** | **String** | The id of the actor to destroy | [required] | **project** | Option<**String**> | | | **environment** | Option<**String**> | | | **override_kill_timeout** | Option<**i64**> | The duration to wait for in milliseconds before killing the actor. This should be used to override the default kill timeout if a faster time is needed, say for ignoring a graceful shutdown. 
| | @@ -86,14 +86,14 @@ Name | Type | Description | Required | Notes > crate::models::ActorsGetActorResponse actors_get(actor, project, environment, endpoint_type) -Gets a dynamic actor. +Gets a actor. ### Parameters Name | Type | Description | Required | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**actor** | **uuid::Uuid** | The id of the actor to destroy | [required] | +**actor** | **String** | The id of the actor to destroy | [required] | **project** | Option<**String**> | | | **environment** | Option<**String**> | | | **endpoint_type** | Option<[**ActorsEndpointType**](.md)> | | | @@ -187,14 +187,14 @@ Name | Type | Description | Required | Notes > serde_json::Value actors_upgrade(actor, actors_upgrade_actor_request, project, environment) -Upgrades a dynamic actor. +Upgrades a actor. ### Parameters Name | Type | Description | Required | Notes ------------- | ------------- | ------------- | ------------- | ------------- -**actor** | **uuid::Uuid** | The id of the actor to upgrade | [required] | +**actor** | **String** | The id of the actor to upgrade | [required] | **actors_upgrade_actor_request** | [**ActorsUpgradeActorRequest**](ActorsUpgradeActorRequest.md) | | [required] | **project** | Option<**String**> | | | **environment** | Option<**String**> | | | @@ -220,7 +220,7 @@ Name | Type | Description | Required | Notes > crate::models::ActorsUpgradeAllActorsResponse actors_upgrade_all(actors_upgrade_all_actors_request, project, environment) -Upgrades a dynamic actor. +Upgrades all actors matching the given tags. ### Parameters diff --git a/sdks/api/runtime/rust/src/apis/actors_api.rs b/sdks/api/runtime/rust/src/apis/actors_api.rs index 1a6e93fdf7..9580242611 100644 --- a/sdks/api/runtime/rust/src/apis/actors_api.rs +++ b/sdks/api/runtime/rust/src/apis/actors_api.rs @@ -120,7 +120,7 @@ pub enum ActorsUsageError { } -/// Create a new dynamic actor. +/// Create a new actor. 
pub async fn actors_create(configuration: &configuration::Configuration, actors_create_actor_request: crate::models::ActorsCreateActorRequest, project: Option<&str>, environment: Option<&str>, endpoint_type: Option) -> Result> { let local_var_configuration = configuration; @@ -161,7 +161,7 @@ pub async fn actors_create(configuration: &configuration::Configuration, actors_ } } -/// Destroy a dynamic actor. +/// Destroy a actor. pub async fn actors_destroy(configuration: &configuration::Configuration, actor: &str, project: Option<&str>, environment: Option<&str>, override_kill_timeout: Option) -> Result> { let local_var_configuration = configuration; @@ -201,7 +201,7 @@ pub async fn actors_destroy(configuration: &configuration::Configuration, actor: } } -/// Gets a dynamic actor. +/// Gets a actor. pub async fn actors_get(configuration: &configuration::Configuration, actor: &str, project: Option<&str>, environment: Option<&str>, endpoint_type: Option) -> Result> { let local_var_configuration = configuration; @@ -371,7 +371,7 @@ pub async fn actors_upgrade(configuration: &configuration::Configuration, actor: } } -/// Upgrades a dynamic actor. +/// Upgrades all actors matching the given tags. pub async fn actors_upgrade_all(configuration: &configuration::Configuration, actors_upgrade_all_actors_request: crate::models::ActorsUpgradeAllActorsRequest, project: Option<&str>, environment: Option<&str>) -> Result> { let local_var_configuration = configuration; diff --git a/sdks/api/runtime/rust/src/models/actors_actor.rs b/sdks/api/runtime/rust/src/models/actors_actor.rs index 0703436cb3..2e16a52360 100644 --- a/sdks/api/runtime/rust/src/models/actors_actor.rs +++ b/sdks/api/runtime/rust/src/models/actors_actor.rs @@ -13,8 +13,9 @@ #[derive(Clone, Debug, PartialEq, Default, Serialize, Deserialize)] pub struct ActorsActor { + /// Can be a UUID or base36 encoded binary data. 
#[serde(rename = "id")] - pub id: uuid::Uuid, + pub id: String, #[serde(rename = "region")] pub region: String, #[serde(rename = "tags", deserialize_with = "Option::deserialize")] @@ -39,7 +40,7 @@ pub struct ActorsActor { } impl ActorsActor { - pub fn new(id: uuid::Uuid, region: String, tags: Option, runtime: crate::models::ActorsRuntime, network: crate::models::ActorsNetwork, lifecycle: crate::models::ActorsLifecycle, created_at: String) -> ActorsActor { + pub fn new(id: String, region: String, tags: Option, runtime: crate::models::ActorsRuntime, network: crate::models::ActorsNetwork, lifecycle: crate::models::ActorsLifecycle, created_at: String) -> ActorsActor { ActorsActor { id, region, diff --git a/sdks/api/runtime/typescript/src/api/resources/actors/client/Client.ts b/sdks/api/runtime/typescript/src/api/resources/actors/client/Client.ts index 35dbea05b5..9d29de0eff 100644 --- a/sdks/api/runtime/typescript/src/api/resources/actors/client/Client.ts +++ b/sdks/api/runtime/typescript/src/api/resources/actors/client/Client.ts @@ -45,9 +45,9 @@ export class Actors { } /** - * Gets a dynamic actor. + * Gets a actor. * - * @param {string} actor - The id of the actor to destroy + * @param {Rivet.Id} actor - The id of the actor to destroy * @param {Rivet.actors.ListActorsRequestQuery} request * @param {Actors.RequestOptions} requestOptions - Request-specific configuration. * @@ -59,14 +59,14 @@ export class Actors { * @throws {@link Rivet.BadRequestError} * * @example - * await client.actors.get("d5e9c84f-c2b2-4bf4-b4b0-7ffd7a9ffc32", { + * await client.actors.get("string", { * project: "string", * environment: "string", * endpointType: "hostname" * }) */ public async get( - actor: string, + actor: Rivet.Id, request: Rivet.actors.ListActorsRequestQuery = {}, requestOptions?: Actors.RequestOptions, ): Promise { @@ -91,7 +91,7 @@ export class Actors { (await core.Supplier.get(this._options.baseUrl)) ?? (await core.Supplier.get(this._options.environment)) ?? 
environments.RivetEnvironment.Production, - `/actors/${encodeURIComponent(actor)}`, + `/actors/${encodeURIComponent(serializers.Id.jsonOrThrow(actor))}`, ), method: "GET", headers: { @@ -378,7 +378,7 @@ export class Actors { } /** - * Create a new dynamic actor. + * Create a new actor. * * @param {Rivet.actors.CreateActorRequestQuery} request * @param {Actors.RequestOptions} requestOptions - Request-specific configuration. @@ -566,9 +566,9 @@ export class Actors { } /** - * Destroy a dynamic actor. + * Destroy a actor. * - * @param {string} actor - The id of the actor to destroy + * @param {Rivet.Id} actor - The id of the actor to destroy * @param {Rivet.actors.DestroyActorRequestQuery} request * @param {Actors.RequestOptions} requestOptions - Request-specific configuration. * @@ -580,14 +580,14 @@ export class Actors { * @throws {@link Rivet.BadRequestError} * * @example - * await client.actors.destroy("d5e9c84f-c2b2-4bf4-b4b0-7ffd7a9ffc32", { + * await client.actors.destroy("string", { * project: "string", * environment: "string", * overrideKillTimeout: 1000000 * }) */ public async destroy( - actor: string, + actor: Rivet.Id, request: Rivet.actors.DestroyActorRequestQuery = {}, requestOptions?: Actors.RequestOptions, ): Promise { @@ -610,7 +610,7 @@ export class Actors { (await core.Supplier.get(this._options.baseUrl)) ?? (await core.Supplier.get(this._options.environment)) ?? environments.RivetEnvironment.Production, - `/actors/${encodeURIComponent(actor)}`, + `/actors/${encodeURIComponent(serializers.Id.jsonOrThrow(actor))}`, ), method: "DELETE", headers: { @@ -724,9 +724,9 @@ export class Actors { } /** - * Upgrades a dynamic actor. + * Upgrades a actor. * - * @param {string} actor - The id of the actor to upgrade + * @param {Rivet.Id} actor - The id of the actor to upgrade * @param {Rivet.actors.UpgradeActorRequestQuery} request * @param {Actors.RequestOptions} requestOptions - Request-specific configuration. 
* @@ -738,7 +738,7 @@ export class Actors { * @throws {@link Rivet.BadRequestError} * * @example - * await client.actors.upgrade("d5e9c84f-c2b2-4bf4-b4b0-7ffd7a9ffc32", { + * await client.actors.upgrade("string", { * project: "string", * environment: "string", * body: { @@ -750,7 +750,7 @@ export class Actors { * }) */ public async upgrade( - actor: string, + actor: Rivet.Id, request: Rivet.actors.UpgradeActorRequestQuery, requestOptions?: Actors.RequestOptions, ): Promise { @@ -769,7 +769,7 @@ export class Actors { (await core.Supplier.get(this._options.baseUrl)) ?? (await core.Supplier.get(this._options.environment)) ?? environments.RivetEnvironment.Production, - `/actors/${encodeURIComponent(actor)}/upgrade`, + `/actors/${encodeURIComponent(serializers.Id.jsonOrThrow(actor))}/upgrade`, ), method: "POST", headers: { @@ -884,7 +884,7 @@ export class Actors { } /** - * Upgrades a dynamic actor. + * Upgrades all actors matching the given tags. * * @param {Rivet.actors.UpgradeAllActorsRequestQuery} request * @param {Actors.RequestOptions} requestOptions - Request-specific configuration. 
diff --git a/sdks/api/runtime/typescript/src/api/resources/actors/resources/common/types/Actor.ts b/sdks/api/runtime/typescript/src/api/resources/actors/resources/common/types/Actor.ts index 6e33dd8a0f..1aeb34bd9f 100644 --- a/sdks/api/runtime/typescript/src/api/resources/actors/resources/common/types/Actor.ts +++ b/sdks/api/runtime/typescript/src/api/resources/actors/resources/common/types/Actor.ts @@ -5,7 +5,7 @@ import * as Rivet from "../../../../../index"; export interface Actor { - id: string; + id: Rivet.Id; region: string; tags?: unknown; runtime: Rivet.actors.Runtime; diff --git a/sdks/api/runtime/typescript/src/api/resources/common/types/Id.ts b/sdks/api/runtime/typescript/src/api/resources/common/types/Id.ts new file mode 100644 index 0000000000..e670e70d7a --- /dev/null +++ b/sdks/api/runtime/typescript/src/api/resources/common/types/Id.ts @@ -0,0 +1,8 @@ +/** + * This file was auto-generated by Fern from our API Definition. + */ + +/** + * Can be a UUID or base36 encoded binary data. 
+ */ +export type Id = string; diff --git a/sdks/api/runtime/typescript/src/api/resources/common/types/index.ts b/sdks/api/runtime/typescript/src/api/resources/common/types/index.ts index bac758724c..d0e2c3842c 100644 --- a/sdks/api/runtime/typescript/src/api/resources/common/types/index.ts +++ b/sdks/api/runtime/typescript/src/api/resources/common/types/index.ts @@ -1,3 +1,4 @@ +export * from "./Id"; export * from "./WatchResponse"; export * from "./Timestamp"; export * from "./ErrorMetadata"; diff --git a/sdks/api/runtime/typescript/src/serialization/resources/actors/resources/common/types/Actor.ts b/sdks/api/runtime/typescript/src/serialization/resources/actors/resources/common/types/Actor.ts index 73be90d58d..aad5b7be1c 100644 --- a/sdks/api/runtime/typescript/src/serialization/resources/actors/resources/common/types/Actor.ts +++ b/sdks/api/runtime/typescript/src/serialization/resources/actors/resources/common/types/Actor.ts @@ -5,6 +5,7 @@ import * as serializers from "../../../../../index"; import * as Rivet from "../../../../../../api/index"; import * as core from "../../../../../../core"; +import { Id } from "../../../../common/types/Id"; import { Runtime } from "./Runtime"; import { Network } from "./Network"; import { Resources } from "./Resources"; @@ -13,7 +14,7 @@ import { Timestamp } from "../../../../common/types/Timestamp"; export const Actor: core.serialization.ObjectSchema = core.serialization.object({ - id: core.serialization.string(), + id: Id, region: core.serialization.string(), tags: core.serialization.unknown(), runtime: Runtime, @@ -27,7 +28,7 @@ export const Actor: core.serialization.ObjectSchema = core.serialization.string(); + +export declare namespace Id { + export type Raw = string; +} diff --git a/sdks/api/runtime/typescript/src/serialization/resources/common/types/index.ts b/sdks/api/runtime/typescript/src/serialization/resources/common/types/index.ts index bac758724c..d0e2c3842c 100644 --- 
a/sdks/api/runtime/typescript/src/serialization/resources/common/types/index.ts +++ b/sdks/api/runtime/typescript/src/serialization/resources/common/types/index.ts @@ -1,3 +1,4 @@ +export * from "./Id"; export * from "./WatchResponse"; export * from "./Timestamp"; export * from "./ErrorMetadata"; diff --git a/site/src/content/docs/cloud/api/actors/create.mdx b/site/src/content/docs/cloud/api/actors/create.mdx index 2cc012408f..defcc81447 100644 --- a/site/src/content/docs/cloud/api/actors/create.mdx +++ b/site/src/content/docs/cloud/api/actors/create.mdx @@ -9,7 +9,7 @@ import API_SCHEMA from './../spec.json'; # actors.create ## Description -Create a new dynamic actor. +Create a new actor. ## Code Examples diff --git a/site/src/content/docs/cloud/api/actors/destroy.mdx b/site/src/content/docs/cloud/api/actors/destroy.mdx index 04a32bacb6..b9b360519d 100644 --- a/site/src/content/docs/cloud/api/actors/destroy.mdx +++ b/site/src/content/docs/cloud/api/actors/destroy.mdx @@ -9,7 +9,7 @@ import API_SCHEMA from './../spec.json'; # actors.destroy ## Description -Destroy a dynamic actor. +Destroy a actor. ## Code Examples @@ -33,5 +33,5 @@ await RIVET.actors.destroy({ ## Schema - + diff --git a/site/src/content/docs/cloud/api/actors/get.mdx b/site/src/content/docs/cloud/api/actors/get.mdx index 8ab18e8702..fdcc86641b 100644 --- a/site/src/content/docs/cloud/api/actors/get.mdx +++ b/site/src/content/docs/cloud/api/actors/get.mdx @@ -9,7 +9,7 @@ import API_SCHEMA from './../spec.json'; # actors.get ## Description -Gets a dynamic actor. +Gets a actor. 
## Code Examples @@ -33,5 +33,5 @@ await RIVET.actors.get({ ## Schema - + diff --git a/site/src/content/docs/cloud/api/actors/upgrade-all.mdx b/site/src/content/docs/cloud/api/actors/upgrade-all.mdx index a1e6a94139..0600baf110 100644 --- a/site/src/content/docs/cloud/api/actors/upgrade-all.mdx +++ b/site/src/content/docs/cloud/api/actors/upgrade-all.mdx @@ -9,7 +9,7 @@ import API_SCHEMA from './../spec.json'; # actors.upgradeAll ## Description -Upgrades a dynamic actor. +Upgrades all actors matching the given tags. ## Code Examples diff --git a/site/src/content/docs/cloud/api/actors/upgrade.mdx b/site/src/content/docs/cloud/api/actors/upgrade.mdx index a8c305de53..48fcb2dc30 100644 --- a/site/src/content/docs/cloud/api/actors/upgrade.mdx +++ b/site/src/content/docs/cloud/api/actors/upgrade.mdx @@ -9,7 +9,7 @@ import API_SCHEMA from './../spec.json'; # actors.upgrade ## Description -Upgrades a dynamic actor. +Upgrades a actor. ## Code Examples @@ -36,6 +36,6 @@ await RIVET.actors.upgrade({ ## Schema - +