Skip to content

Commit

Permalink
feat: Generate lexicon token as const &str (#179)
Browse files Browse the repository at this point in the history
* Update codegen to generate lexicon tokens as const &str

* Update API, generated by updated codegen
  • Loading branch information
sugyan committed May 23, 2024
1 parent b3af62a commit 813c713
Show file tree
Hide file tree
Showing 7 changed files with 90 additions and 43 deletions.
24 changes: 12 additions & 12 deletions atrium-api/src/app/bsky/feed/defs.rs

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions atrium-api/src/app/bsky/graph/defs.rs

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

14 changes: 7 additions & 7 deletions atrium-api/src/com/atproto/moderation/defs.rs

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

8 changes: 4 additions & 4 deletions atrium-api/src/tools/ozone/moderation/defs.rs

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

51 changes: 51 additions & 0 deletions atrium-api/src/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -110,6 +110,7 @@ impl Eq for UnknownData {}
mod tests {
use super::*;
use serde_json::{from_str, to_string};
use std::collections::BTreeMap;

const CID_LINK_JSON: &str =
r#"{"$link":"bafkreibme22gw2h7y2h7tg2fhqotaqjucnbc24deqo72b6mkl2egezxhvy"}"#;
Expand Down Expand Up @@ -198,4 +199,54 @@ mod tests {
}))
);
}

#[test]
fn test_union() {
    // Local struct payloads for the two known variants of the union.
    #[derive(serde::Serialize, serde::Deserialize, Debug, Clone, PartialEq, Eq)]
    struct Bar {
        bar: String,
    }

    #[derive(serde::Serialize, serde::Deserialize, Debug, Clone, PartialEq, Eq)]
    struct Baz {
        baz: i32,
    }

    // Internally-tagged enum keyed on "$type", mirroring lexicon ref unions.
    #[derive(serde::Serialize, serde::Deserialize, Debug, Clone, PartialEq, Eq)]
    #[serde(tag = "$type")]
    enum FooRefs {
        #[serde(rename = "example.com#bar")]
        Bar(Box<Bar>),
        #[serde(rename = "example.com#baz")]
        Baz(Box<Baz>),
    }

    type Foo = Union<FooRefs>;

    // A "$type" matching a known variant deserializes into Union::Refs.
    let bar_input = r#"{"$type":"example.com#bar","bar":"bar"}"#;
    let parsed = serde_json::from_str::<Foo>(bar_input).expect("failed to deserialize foo");
    let expected_bar = FooRefs::Bar(Box::new(Bar {
        bar: String::from("bar"),
    }));
    assert_eq!(parsed, Union::Refs(expected_bar));

    let baz_input = r#"{"$type":"example.com#baz","baz":42}"#;
    let parsed = serde_json::from_str::<Foo>(baz_input).expect("failed to deserialize foo");
    assert_eq!(parsed, Union::Refs(FooRefs::Baz(Box::new(Baz { baz: 42 }))));

    // An unrecognized "$type" falls back to Union::Unknown, preserving the
    // tag and the remaining fields as raw IPLD data.
    let unknown_input = r#"{"$type":"example.com#foo","foo":true}"#;
    let parsed = serde_json::from_str::<Foo>(unknown_input).expect("failed to deserialize foo");
    let data_entries = [(String::from("foo"), Ipld::Bool(true))];
    let expected_unknown = UnknownData {
        r#type: String::from("example.com#foo"),
        data: Ipld::Map(BTreeMap::from_iter(data_entries)),
    };
    assert_eq!(parsed, Union::Unknown(expected_unknown));
}
}
4 changes: 2 additions & 2 deletions lexicon/atrium-codegen/src/generator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -39,14 +39,14 @@ pub(crate) fn generate_schemas(
}
// main def
if name == "main" {
tokens.push(user_type(def, basename, true)?);
tokens.push(user_type(def, &schema.id, basename, true)?);
} else {
names.push(name);
}
}
// other defs
for &name in names.iter().sorted() {
tokens.push(user_type(&schema.defs[name], name, false)?);
tokens.push(user_type(&schema.defs[name], &schema.id, name, false)?);
}
// ref unions
tokens.push(ref_unions(&schema.id, &find_ref_unions(&schema.defs))?);
Expand Down
28 changes: 12 additions & 16 deletions lexicon/atrium-codegen/src/token_stream.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
use atrium_lex::lexicon::*;
use heck::{ToPascalCase, ToSnakeCase};
use heck::{ToPascalCase, ToShoutySnakeCase, ToSnakeCase};
use itertools::Itertools;
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
Expand All @@ -13,14 +13,19 @@ enum OutputType {
Bytes,
}

pub fn user_type(def: &LexUserType, name: &str, is_main: bool) -> Result<TokenStream> {
pub fn user_type(
def: &LexUserType,
schema_id: &str,
name: &str,
is_main: bool,
) -> Result<TokenStream> {
let user_type = match def {
LexUserType::Record(record) => lex_record(record)?,
LexUserType::XrpcQuery(query) => lex_query(query)?,
LexUserType::XrpcProcedure(procedure) => lex_procedure(procedure)?,
LexUserType::XrpcSubscription(subscription) => lex_subscription(subscription)?,
LexUserType::Array(array) => lex_array(array, name)?,
LexUserType::Token(token) => lex_token(token, name)?,
LexUserType::Token(token) => lex_token(token, name, schema_id)?,
LexUserType::Object(object) => lex_object(object, if is_main { "Main" } else { name })?,
LexUserType::String(string) => lex_string(string, name)?,
_ => unimplemented!("{def:?}"),
Expand Down Expand Up @@ -245,13 +250,13 @@ fn lex_array(array: &LexArray, name: &str) -> Result<TokenStream> {
})
}

fn lex_token(token: &LexToken, name: &str) -> Result<TokenStream> {
fn lex_token(token: &LexToken, name: &str, schema_id: &str) -> Result<TokenStream> {
let description = description(&token.description);
let token_name = format_ident!("{}", name.to_pascal_case());
// TODO
let token_name = format_ident!("{}", name.to_shouty_snake_case());
let token_value = format!("{schema_id}#{name}");
Ok(quote! {
#description
pub struct #token_name;
pub const #token_name: &str = #token_value;
})
}

Expand Down Expand Up @@ -314,11 +319,6 @@ fn lex_object_property(
LexObjectProperty::String(string) => string_type(string)?,
LexObjectProperty::Unknown(unknown) => unknown_type(unknown, Some(name))?,
};
// TODO: must be determined
if field_type.is_empty() {
return Ok(quote!());
}
// TODO: other keywords?
let field_name = format_ident!(
"{}",
if name == "ref" || name == "type" {
Expand Down Expand Up @@ -412,10 +412,6 @@ fn array_type(
)?,
_ => unimplemented!("{:?}", array.items),
};
// TODO: must be determined
if item_type.is_empty() {
return Ok((description, quote!()));
}
Ok((description, quote!(Vec<#item_type>)))
}

Expand Down

0 comments on commit 813c713

Please sign in to comment.