220 changes: 109 additions & 111 deletions Cargo.lock

Large diffs are not rendered by default.

6 changes: 4 additions & 2 deletions Cargo.toml
@@ -1,14 +1,16 @@
[workspace]
members = [
+    "example-build",
+    "example-macro",
    "progenitor",
    "progenitor-client",
    "progenitor-impl",
    "progenitor-macro",
-    "example-build",
-    "example-macro",
]

default-members = [
+    "example-build",
+    "example-macro",
    "progenitor",
    "progenitor-client",
    "progenitor-impl",
4 changes: 2 additions & 2 deletions example-build/Cargo.toml
@@ -6,11 +6,11 @@ edition = "2018"

[dependencies]
anyhow = "1.0"
+chrono = { version = "0.4", features = ["serde"] }
percent-encoding = "2.1"
-serde = { version = "1.0", features = ["derive"] }
reqwest = { version = "0.11", features = ["json", "stream"] }
+serde = { version = "1.0", features = ["derive"] }
uuid = { version = "0.8", features = ["serde", "v4"] }
-chrono = { version = "0.4", features = ["serde"] }

[build-dependencies]
progenitor = { path = "../progenitor" }
7 changes: 4 additions & 3 deletions example-macro/Cargo.toml
@@ -5,10 +5,11 @@ authors = ["Adam H. Leventhal <ahl@oxidecomputer.com>"]
edition = "2018"

[dependencies]
-progenitor = { path = "../progenitor" }
anyhow = "1.0"
+chrono = { version = "0.4", features = ["serde"] }
percent-encoding = "2.1"
-serde = { version = "1.0", features = ["derive"] }
+progenitor = { path = "../progenitor" }
reqwest = { version = "0.11", features = ["json", "stream"] }
+schemars = "0.8"
+serde = { version = "1.0", features = ["derive"] }
uuid = { version = "0.8", features = ["serde", "v4"] }
-chrono = { version = "0.4", features = ["serde"] }
19 changes: 13 additions & 6 deletions example-macro/src/main.rs
@@ -1,16 +1,23 @@
-// Copyright 2021 Oxide Computer Company
+// Copyright 2022 Oxide Computer Company

use progenitor::generate_api;

generate_api!(
-    "../sample_openapi/keeper.json",
-    (),
-    |_, request| {
+    spec = "../sample_openapi/keeper.json",
+    inner_type = (),
+    pre_hook = (|_, request| {
        println!("doing this {:?}", request);
-    },
-    crate::all_done
+    }),
+    post_hook = crate::all_done,
+    derives = [schemars::JsonSchema],
);

fn all_done(_: &(), _result: &reqwest::Result<reqwest::Response>) {}

+mod buildomat {
+    use progenitor::generate_api;
+
+    generate_api!("../sample_openapi/buildomat.json");
+}
+
fn main() {}
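For reference, here is a commented restatement of the macro invocation above. It is illustrative only: the comments reflect a reading of the parsing code in progenitor-macro/src/lib.rs further down, and are a sketch rather than authoritative documentation of the settings.

use progenitor::generate_api;

// Named-settings form, deserialized into the macro's Settings struct.
// The one-argument form, e.g. generate_api!("../sample_openapi/buildomat.json"),
// is still accepted and is handled by the LitStr fast path in the macro.
generate_api!(
    // Path to the OpenAPI document, resolved against CARGO_MANIFEST_DIR
    // (or the current directory when that variable is unset).
    spec = "../sample_openapi/keeper.json",
    // A type the hooks receive by reference; all_done above takes `&()` to match.
    inner_type = (),
    // Hook run around each request; the closure is parenthesized so that its
    // commas are not read as separators between settings.
    pre_hook = (|_, request| {
        println!("doing this {:?}", request);
    }),
    // A path to a fn also parses anywhere a hook closure is accepted.
    post_hook = crate::all_done,
    // Extra derive macros applied to the generated types.
    derives = [schemars::JsonSchema],
);

fn all_done(_: &(), _result: &reqwest::Result<reqwest::Response>) {}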
8 changes: 4 additions & 4 deletions progenitor-impl/Cargo.toml
@@ -8,19 +8,19 @@ description = "An OpenAPI client generator - core implementation"

[dependencies]
anyhow = "1.0"
+convert_case = "0.4"
getopts = "0.2"
indexmap = "1.7"
-openapiv3 = "1.0.0-beta.5"
+openapiv3 = "1.0.0"
proc-macro2 = "1.0"
quote = "1.0"
regex = "1.5"
rustfmt-wrapper = "0.1"
-schemars = "0.8"
+schemars = { version = "0.8", features = [ "chrono", "uuid" ] }
serde = { version = "1.0", features = [ "derive" ] }
serde_json = "1.0"
-convert_case = "0.4"
-typify = { git = "https://github.com/oxidecomputer/typify" }
thiserror = "1.0"
+typify = { git = "https://github.com/oxidecomputer/typify" }

[dev-dependencies]
expectorate = "1.0"
7 changes: 6 additions & 1 deletion progenitor-impl/src/lib.rs
@@ -1,4 +1,4 @@
-// Copyright 2021 Oxide Computer Company
+// Copyright 2022 Oxide Computer Company

use std::{cmp::Ordering, collections::HashMap};

@@ -108,6 +108,11 @@ impl Generator {
        self
    }

+    pub fn with_derive(&mut self, derive: TokenStream) -> &mut Self {
+        self.type_space.add_derive(derive);
+        self
+    }
+
    pub fn generate_tokens(&mut self, spec: &OpenAPI) -> Result<TokenStream> {
        // Convert our components dictionary to schemars
        let schemas = spec
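The new with_derive() method is also reachable outside the macro, through the Generator builder that progenitor-impl exposes. Below is a minimal sketch of what a caller (for instance a build script) might look like; the file path, the use of anyhow, and printing the tokens are assumptions for illustration, while Generator::new(), with_derive(), and generate_tokens() come from this change and the surrounding code.

use progenitor_impl::Generator;
use quote::quote;

fn main() -> anyhow::Result<()> {
    // Load and parse the OpenAPI document (path is illustrative).
    let file = std::fs::File::open("../sample_openapi/keeper.json")?;
    let spec: openapiv3::OpenAPI = serde_json::from_reader(file)?;

    let mut generator = Generator::new();
    // Add an extra derive to every generated type, as the macro's
    // `derives = [...]` setting now does.
    generator.with_derive(quote!(schemars::JsonSchema));

    // Emit the client as a token stream; a build script would typically
    // format this and write it under OUT_DIR instead of printing it.
    let tokens = generator.generate_tokens(&spec)?;
    println!("{}", tokens);
    Ok(())
}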
6 changes: 4 additions & 2 deletions progenitor-macro/Cargo.toml
@@ -7,11 +7,13 @@ repository = "https://github.com/oxidecomputer/progenitor.git"
description = "An OpenAPI client generator - macros"

[dependencies]
-openapiv3 = "1.0.0-beta.5"
+openapiv3 = "1.0.0"
+proc-macro2 = "1.0"
progenitor-impl = { path = "../progenitor-impl" }
quote = "1.0"
-proc-macro2 = "1.0"
+serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
+serde_tokenstream = "0.1.3"
syn = "1.0"

[lib]
132 changes: 63 additions & 69 deletions progenitor-macro/src/lib.rs
@@ -1,15 +1,14 @@
-// Copyright 2021 Oxide Computer Company
+// Copyright 2022 Oxide Computer Company

use std::path::Path;

use openapiv3::OpenAPI;
use proc_macro::TokenStream;
use progenitor_impl::Generator;
use quote::{quote, ToTokens};
-use syn::{
-    parse::{Parse, ParseStream},
-    ExprClosure, LitStr, Token,
-};
+use serde::Deserialize;
+use serde_tokenstream::ParseWrapper;
+use syn::LitStr;

#[proc_macro]
pub fn generate_api(item: TokenStream) -> TokenStream {
@@ -19,101 +18,96 @@ pub fn generate_api(item: TokenStream) -> TokenStream {
}

+#[derive(Deserialize)]
struct Settings {
-    file: LitStr,
-    inner: Option<proc_macro2::TokenStream>,
-    pre: Option<proc_macro2::TokenStream>,
-    post: Option<proc_macro2::TokenStream>,
+    spec: ParseWrapper<LitStr>,
+    inner_type: Option<ParseWrapper<syn::Type>>,
+    pre_hook: Option<ParseWrapper<ClosureOrPath>>,
+    post_hook: Option<ParseWrapper<ClosureOrPath>>,
+    #[serde(default)]
+    derives: Vec<ParseWrapper<syn::Path>>,
}

-impl Parse for Settings {
-    fn parse(input: ParseStream) -> Result<Self, syn::Error> {
-        let file = input.parse::<LitStr>()?;
-        let inner = parse_inner(input)?;
-        let pre = parse_hook(input)?;
-        let post = parse_hook(input)?;
+#[derive(Debug)]
+struct ClosureOrPath(proc_macro2::TokenStream);

-        // Optional trailing comma.
-        if input.peek(Token!(,)) {
-            let _ = input.parse::<Token!(,)>();
-        }
+impl syn::parse::Parse for ClosureOrPath {
+    fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
+        let lookahead = input.lookahead1();

-        Ok(Settings {
-            file,
-            inner,
-            pre,
-            post,
-        })
-    }
-}
+        if lookahead.peek(syn::token::Paren) {
+            let group: proc_macro2::Group = input.parse()?;
+            return syn::parse2::<Self>(group.stream());
+        }

-fn parse_inner(
-    input: ParseStream,
-) -> Result<Option<proc_macro2::TokenStream>, syn::Error> {
-    if input.is_empty() {
-        return Ok(None);
-    }
-    let _: Token!(,) = input.parse()?;
-    if input.is_empty() {
-        return Ok(None);
-    }
-    Ok(Some(input.parse::<syn::Type>()?.to_token_stream()))
-}
+        if let Ok(closure) = input.parse::<syn::ExprClosure>() {
+            return Ok(Self(closure.to_token_stream()));
+        }

-fn parse_hook(
-    input: ParseStream,
-) -> Result<Option<proc_macro2::TokenStream>, syn::Error> {
-    if input.is_empty() {
-        return Ok(None);
-    }
-    let _: Token!(,) = input.parse()?;
-    if input.is_empty() {
-        return Ok(None);
-    }
-    if let Ok(closure) = input.parse::<ExprClosure>() {
-        Ok(Some(closure.to_token_stream()))
-    } else {
-        Ok(Some(input.parse::<syn::Path>()?.to_token_stream()))
+        input
+            .parse::<syn::Path>()
+            .map(|path| Self(path.to_token_stream()))
    }
}

fn do_generate_api(item: TokenStream) -> Result<TokenStream, syn::Error> {
-    let Settings {
-        file,
-        inner,
-        pre,
-        post,
-    } = syn::parse::<Settings>(item)?;
+    let (spec, inner_type, pre_hook, post_hook, derives) =
+        if let Ok(spec) = syn::parse::<LitStr>(item.clone()) {
+            (spec, None, None, None, Vec::new())
+        } else {
+            let Settings {
+                spec,
+                inner_type,
+                pre_hook,
+                post_hook,
+                derives,
+            } = serde_tokenstream::from_tokenstream(&item.into())?;
+            (
+                spec.into_inner(),
+                inner_type.map(|x| x.into_inner()),
+                pre_hook.map(|x| x.into_inner()),
+                post_hook.map(|x| x.into_inner()),
+                derives.into_iter().map(ParseWrapper::into_inner).collect(),
+            )
+        };

    let dir = std::env::var("CARGO_MANIFEST_DIR").map_or_else(
        |_| std::env::current_dir().unwrap(),
        |s| Path::new(&s).to_path_buf(),
    );

-    let path = dir.join(file.value());
+    let path = dir.join(spec.value());
    let path_str = path.to_string_lossy();

-    let spec: OpenAPI =
+    let oapi: OpenAPI =
        serde_json::from_reader(std::fs::File::open(&path).map_err(|e| {
            syn::Error::new(
-                file.span(),
+                spec.span(),
                format!("couldn't read file {}: {}", path_str, e.to_string()),
            )
        })?)
        .map_err(|e| {
            syn::Error::new(
-                file.span(),
+                spec.span(),
                format!("failed to parse {}: {}", path_str, e.to_string()),
            )
        })?;

    let mut builder = Generator::new();
-    inner.map(|inner_type| builder.with_inner_type(inner_type));
-    pre.map(|pre_hook| builder.with_pre_hook(pre_hook));
-    post.map(|post_hook| builder.with_post_hook(post_hook));
-    let code = builder.generate_tokens(&spec).map_err(|e| {
+    inner_type.map(|inner_type| {
+        builder.with_inner_type(inner_type.to_token_stream())
+    });
+    pre_hook.map(|pre_hook| builder.with_pre_hook(pre_hook.0));
+    post_hook.map(|post_hook| builder.with_post_hook(post_hook.0));
+
+    derives.into_iter().for_each(|derive| {
+        builder.with_derive(derive.to_token_stream());
+    });
+
+    let code = builder.generate_tokens(&oapi).map_err(|e| {
        syn::Error::new(
-            file.span(),
-            format!("generation error for {}: {}", file.value(), e.to_string()),
+            spec.span(),
+            format!("generation error for {}: {}", spec.value(), e.to_string()),
        )
    })?;

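The core of this change is swapping the hand-rolled syn parser for serde_tokenstream: the key = value arguments are deserialized straight into the Settings struct, with ParseWrapper bridging serde to syn types. The standalone sketch below shows that pattern in isolation; the ExampleSettings struct and the assertions are hypothetical, not code from this PR.

use quote::quote;
use serde::Deserialize;
use serde_tokenstream::{from_tokenstream, ParseWrapper};

// A stripped-down analogue of the macro's Settings struct.
#[derive(Deserialize)]
struct ExampleSettings {
    spec: ParseWrapper<syn::LitStr>,
    #[serde(default)]
    derives: Vec<ParseWrapper<syn::Path>>,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // The same shape of input the macro receives in its named-settings form.
    let input = quote! {
        spec = "../sample_openapi/keeper.json",
        derives = [schemars::JsonSchema],
    };

    let settings: ExampleSettings = from_tokenstream(&input)?;
    // ParseWrapper::into_inner hands back the underlying syn value.
    assert_eq!(
        settings.spec.into_inner().value(),
        "../sample_openapi/keeper.json"
    );
    assert_eq!(settings.derives.len(), 1);
    Ok(())
}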
3 changes: 2 additions & 1 deletion progenitor/Cargo.toml
@@ -11,7 +11,7 @@ progenitor-macro = { path = "../progenitor-macro" }
progenitor-impl = { path = "../progenitor-impl" }
anyhow = "1.0"
getopts = "0.2"
openapiv3 = "1.0.0-beta.5"
openapiv3 = "1.0.0"
serde = { version = "1.0", features = [ "derive" ] }
serde_json = "1.0"

@@ -20,4 +20,5 @@ chrono = { version = "0.4", features = ["serde"] }
futures = "0.3"
percent-encoding = "2.1"
reqwest = { version = "0.11", features = ["json", "stream"] }
schemars = "0.8"
uuid = { version = "0.8", features = ["serde", "v4"] }