Use DataPayload::from_static_ref in databake (#3500)

robertbastian committed Jun 7, 2023
1 parent 4925a50 commit 5e92ca1
Showing 276 changed files with 11,643 additions and 11,894 deletions.
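The gist of the change: for almost every data key, the generated `DataProvider::load` used to clone the baked `'static` struct into an owned payload via `ZeroFrom` on each request; it now wraps the static reference directly with `DataPayload::from_static_ref`. A minimal sketch of the difference, using stand-in types rather than the real `icu_provider` API:

// Stand-in for icu_provider::DataPayload, illustrating the two construction
// paths this commit switches between. These types are illustrative only.
enum Payload<T: 'static> {
    // Old path: convert or allocate at load time (ZeroFrom in the real code).
    Owned(T),
    // New path: borrow the baked data directly; no conversion, no allocation.
    StaticRef(&'static T),
}

impl<T: 'static> Payload<T> {
    fn from_owned(value: T) -> Self {
        Payload::Owned(value)
    }
    fn from_static_ref(value: &'static T) -> Self {
        Payload::StaticRef(value)
    }
    fn get(&self) -> &T {
        match self {
            Payload::Owned(v) => v,
            Payload::StaticRef(v) => *v,
        }
    }
}

static BAKED: [u32; 3] = [1, 2, 3];

fn main() {
    // Old: clone the static data into an owned payload on every load.
    let owned = Payload::from_owned(BAKED.to_vec());
    // New: wrap the static reference; the load becomes effectively free.
    let borrowed = Payload::from_static_ref(&BAKED);
    assert_eq!(owned.get().len(), borrowed.get().len());
}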
19 changes: 0 additions & 19 deletions components/datetime/src/provider/calendar/skeletons.rs
@@ -110,22 +110,3 @@ impl databake::Bake for DateSkeletonPatternsV1Marker {
        }
    }
}
-
-type BakedDateSkeletonPatternsV1 =
-    &'static [(&'static [crate::fields::Field], PatternPlurals<'static>)];
-
-impl zerofrom::ZeroFrom<'static, BakedDateSkeletonPatternsV1> for DateSkeletonPatternsV1<'static> {
-    fn zero_from(other: &'static BakedDateSkeletonPatternsV1) -> Self {
-        Self(
-            other
-                .iter()
-                .map(|(fields, pattern)| {
-                    (
-                        SkeletonV1(Skeleton(fields.iter().cloned().collect())),
-                        zerofrom::ZeroFrom::zero_from(pattern),
-                    )
-                })
-                .collect(),
-        )
-    }
-}
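The impl deleted above existed only so the baked `&'static [(fields, pattern)]` slice could be converted into a `DateSkeletonPatternsV1` at load time; that conversion now happens inline in the generated code (see the skeletons branch in `baked_exporter.rs` below). For context, `zerofrom::ZeroFrom` constructs a value that borrows from an existing one instead of copying it; a minimal hand-rolled impl, assuming only the `zerofrom` crate:

use std::borrow::Cow;
use zerofrom::ZeroFrom;

struct Message<'data>(Cow<'data, str>);

// Build a Message that borrows from `other` for the lifetime 'zf, without
// copying the underlying string data.
impl<'zf, 'data> ZeroFrom<'zf, Message<'data>> for Message<'zf> {
    fn zero_from(other: &'zf Message<'data>) -> Self {
        Message(Cow::Borrowed(&*other.0))
    }
}

fn main() {
    let owned = Message(Cow::Owned("hello".to_string()));
    // No allocation here: the new Message borrows `owned`'s buffer.
    let borrowed: Message = ZeroFrom::zero_from(&owned);
    assert_eq!(&*borrowed.0, "hello");
}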
6 changes: 6 additions & 0 deletions components/datetime/src/skeleton/reference.rs
@@ -53,6 +53,12 @@ impl From<Vec<fields::Field>> for Skeleton {
    }
}

+impl From<&[fields::Field]> for Skeleton {
+    fn from(fields: &[fields::Field]) -> Self {
+        Self(fields.into())
+    }
+}
+
/// Convert a Pattern into a Skeleton. This will remove all of the string literals, and sort
/// the fields into the canonical sort order. Not all fields are supported by Skeletons, so map
/// fields into skeleton-appropriate ones. For instance, in the "ja" locale the pattern "aK:mm"
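The new impl lets generated code turn a baked `&'static [Field]` slice into a `Skeleton` with a plain `.into()` call (used as `(*fields).into()` in `baked_exporter.rs` below). A self-contained sketch of the same pattern, with a `Vec`-backed stand-in for the real field container:

// Vec-backed stand-in for icu_datetime's Skeleton; the real container type
// differs, but it likewise implements From<&[T]> for cloneable elements.
#[derive(Clone, Debug, PartialEq)]
struct Field(char);

#[derive(Debug)]
struct Skeleton(Vec<Field>);

impl From<&[Field]> for Skeleton {
    fn from(fields: &[Field]) -> Self {
        // Vec<Field>: From<&[Field]> clones the slice's elements.
        Self(fields.into())
    }
}

fn main() {
    static FIELDS: &[Field] = &[Field('y'), Field('M'), Field('d')];
    // The call shape the baked data uses: a &'static slice, converted on load.
    let skeleton: Skeleton = FIELDS.into();
    assert_eq!(skeleton.0.len(), 3);
}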
122 changes: 49 additions & 73 deletions provider/datagen/src/baked_exporter.rs
@@ -137,13 +137,11 @@ pub struct BakedExporter {
/// Data required to write the implementations
struct ImplData {
    marker: SyncTokenStream,
-    lookup_ident: SyncTokenStream,
    feature: SyncTokenStream,
    macro_ident: SyncTokenStream,
    prefixed_macro_ident: SyncTokenStream,
    hash_ident: SyncTokenStream,
    mod_ident: SyncTokenStream,
-    into_any_payload: SyncTokenStream,
}

impl std::fmt::Debug for BakedExporter {
@@ -193,7 +191,7 @@ impl BakedExporter {
        relative_path: P,
        data: TokenStream,
    ) -> Result<(), DataError> {
-        let path = self.mod_directory.join(&relative_path).with_extension("rs");
+        let path = self.mod_directory.join(&relative_path);

        let mut formatted = if self.pretty {
            use std::process::{Command, Stdio};
@@ -332,8 +330,6 @@ impl DataExporter for BakedExporter {
            .replace('@', "_v")
            .replace('/', "_");

-        let lookup_ident = format!("lookup_{ident}").parse::<TokenStream>().unwrap();
-
        let mut singleton = None.into_iter();

        let lookup = match values.iter().map(|(_, l)| l.len()).sum() {
@@ -349,8 +345,11 @@
                // Exposing singleton structs separately allows us to get rid of fallibility by using
                // the struct directly.
                singleton = Some(quote! {
-                    #[doc(hidden)]
-                    pub const #singleton_ident: &'static #struct_type = &#bake;
+                    #[clippy::msrv = "1.61"]
+                    impl $provider {
+                        #[doc(hidden)]
+                        pub const #singleton_ident: &'static #struct_type = &#bake;
+                    }
                })
                .into_iter();

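Because the old `impl $provider` block that held the lookup function is gone (see the macro hunk below), the singleton constant now carries its own `impl $provider` wrapper. A hypothetical expansion, with an invented provider and constant name:

// Hypothetical expansion of the singleton arm; the real constant name and
// struct type are derived from the data key being baked.
struct BakedDataProvider;

impl BakedDataProvider {
    #[doc(hidden)]
    pub const SINGLETON_DEMO_V1: &'static str = "baked singleton";
}

fn main() {
    // Singletons are exposed as infallible associated constants, so callers
    // can bypass DataProvider::load and its error handling entirely.
    assert_eq!(BakedDataProvider::SINGLETON_DEMO_V1, "baked singleton");
}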
@@ -410,15 +409,22 @@
            }
        };

-        let into_any_payload = if is_datetime_skeletons {
+        let into_data_payload = if is_datetime_skeletons {
            quote! {
-                .map(icu_provider::prelude::zerofrom::ZeroFrom::zero_from)
-                .map(icu_provider::DataPayload::<#marker>::from_owned)
-                .map(icu_provider::DataPayload::wrap_into_any_payload)
+                icu_provider::DataPayload::from_owned(icu_datetime::provider::calendar::DateSkeletonPatternsV1(
+                    payload
+                        .iter()
+                        .map(|(fields, pattern)| (
+                            icu_datetime::provider::calendar::SkeletonV1((*fields).into()),
+                            icu_provider::prelude::zerofrom::ZeroFrom::zero_from(pattern)
+                        ))
+                        .collect(),
+                ))
+
            }
        } else {
            quote! {
-                .map(icu_provider::AnyPayload::from_static_ref)
+                icu_provider::DataPayload::from_static_ref(payload)
            }
        };

@@ -428,7 +434,7 @@
        );
        let prefixed_macro_ident = format!("__impl_{ident}").parse::<TokenStream>().unwrap();
        self.write_to_file(
-            PathBuf::from(format!("macros/{}", ident)),
+            PathBuf::from(format!("macros/{}.data.rs", ident)),
            quote!{
                #[doc = #doc]
                /// hardcoded in this file. This allows the struct to be used with
@@ -437,28 +443,19 @@
                #[macro_export]
                macro_rules! #prefixed_macro_ident {
                    ($provider:path) => {
-
-                        #[clippy::msrv = "1.61"]
-                        impl $provider {
-                            #(#singleton)*
-
-                            #[doc(hidden)]
-                            pub fn #lookup_ident(locale: &icu_provider::DataLocale) -> Result<&'static #struct_type, icu_provider::DataErrorKind> {
-                                #lookup
-                            }
-
-                        }
+                        #(#singleton)*

                        #[clippy::msrv = "1.61"]
                        impl icu_provider::DataProvider<#marker> for $provider {
                            fn load(
                                &self,
                                req: icu_provider::DataRequest,
                            ) -> Result<icu_provider::DataResponse<#marker>, icu_provider::DataError> {
-                                match Self::#lookup_ident(&req.locale) {
+                                let locale = req.locale;
+                                match {#lookup} {
                                    Ok(payload) => Ok(icu_provider::DataResponse {
                                        metadata: Default::default(),
-                                        payload: Some(icu_provider::DataPayload::from_owned(icu_provider::prelude::zerofrom::ZeroFrom::zero_from(payload))),
+                                        payload: Some(#into_data_payload),
                                    }),
                                    Err(e) => Err(e.with_req(<#marker as icu_provider::KeyedDataMarker>::KEY, req))
                                }
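In the generated `load` above, `#into_data_payload` expands to the `from_static_ref` call for ordinary keys; for the datetime skeletons key it expands to an owned conversion of each `(fields, pattern)` tuple, as set up earlier. A toy analogue of that owned branch, with simplified types:

// Toy analogue of the DateSkeletonPatternsV1 branch: the baked form is a
// static slice of tuples, while the runtime form owns a converted collection,
// so the payload must be built with from_owned rather than from_static_ref.
struct RuntimeSkeletons(Vec<(String, u32)>);

static BAKED: &[(&str, u32)] = &[("yMd", 1), ("Hm", 2)];

fn into_owned(payload: &'static [(&'static str, u32)]) -> RuntimeSkeletons {
    RuntimeSkeletons(
        payload
            .iter()
            .map(|(skeleton, pattern)| ((*skeleton).to_string(), *pattern))
            .collect(),
    )
}

fn main() {
    // Unlike the from_static_ref path, this allocates on every load.
    assert_eq!(into_owned(BAKED).0.len(), 2);
}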
@@ -471,13 +468,11 @@

        let data = ImplData {
            feature: feature.to_string(),
-            lookup_ident: lookup_ident.to_string(),
            marker: quote!(#marker).to_string(),
            macro_ident: format!("impl_{ident}"),
            prefixed_macro_ident: prefixed_macro_ident.to_string(),
            hash_ident: ident.to_ascii_uppercase(),
            mod_ident: ident,
-            into_any_payload: into_any_payload.to_string(),
        };

        self.impl_data
@@ -497,10 +492,6 @@
            .values()
            .map(|data| data.feature.parse::<TokenStream>().unwrap())
            .collect::<Vec<_>>();
-        let lookup_idents = data
-            .values()
-            .map(|data| data.lookup_ident.parse::<TokenStream>().unwrap())
-            .collect::<Vec<_>>();
        let markers = data
            .values()
            .map(|data| data.marker.parse::<TokenStream>().unwrap())
@@ -514,6 +505,10 @@
            .values()
            .map(|data| data.mod_ident.parse::<TokenStream>().unwrap())
            .collect::<Vec<_>>();
+        let file_paths = data
+            .values()
+            .map(|data| format!("macros/{}.data.rs", data.mod_ident))
+            .collect::<Vec<_>>();

        // We prefix all macros with `__`, as these will be automatically exported at the crate root, which is annoying
        // for crates that include the data but don't want it to be public. We then reexport them as items that use
@@ -527,49 +522,15 @@
            .values()
            .map(|data| data.hash_ident.parse::<TokenStream>().unwrap())
            .collect::<Vec<_>>();
-        let into_any_payloads = data
-            .values()
-            .map(|data| data.into_any_payload.parse::<TokenStream>().unwrap())
-            .collect::<Vec<_>>();
-
-        let any_body = if data.is_empty() {
-            quote! {
-                Err(icu_provider::DataErrorKind::MissingDataKey.with_req(key, req))
-            }
-        } else {
-            quote! {
-                #(
-                    #features
-                    const #hash_idents: icu_provider::DataKeyHash = <#markers as icu_provider::KeyedDataMarker>::KEY.hashed();
-                )*
-                match key.hashed() {
-                    #(
-                        #features
-                        #hash_idents => Self::#lookup_idents(&req.locale) #into_any_payloads,
-                    )*
-                    _ => Err(icu_provider::DataErrorKind::MissingDataKey),
-                }
-                .map(|payload| icu_provider::AnyResponse {
-                    payload: Some(payload),
-                    metadata: Default::default(),
-                })
-                .map_err(|e| e.with_req(key, req))
-            }
-        };
-
        self.write_to_file(
-            PathBuf::from("macros"),
+            PathBuf::from("macros.rs"),
            quote! {

-                #[macro_use]
-                mod macros {
-                    #(
-                        #[macro_use]
-                        mod #mod_idents;
-                    )*
-                }

                #(
+                    #[macro_use]
+                    #[path = #file_paths]
+                    mod #mod_idents;
                    #[doc(inline)]
                    pub use #prefixed_macro_idents as #macro_idents;
                )*
@@ -612,7 +573,22 @@
                #[clippy::msrv = "1.61"]
                impl icu_provider::AnyProvider for $provider {
                    fn load_any(&self, key: icu_provider::DataKey, req: icu_provider::DataRequest) -> Result<icu_provider::AnyResponse, icu_provider::DataError> {
-                        #any_body
+                        #(
+                            #features
+                            const #hash_idents: icu_provider::DataKeyHash = <#markers as icu_provider::KeyedDataMarker>::KEY.hashed();
+                        )*
+                        match key.hashed() {
+                            #(
+                                #features
+                                #hash_idents => icu_provider::DataProvider::<#markers>::load(self, req)
+                                    .and_then(|r| r.take_metadata_and_payload())
+                                    .map(|(metadata, payload)| icu_provider::AnyResponse {
+                                        payload: Some(payload.wrap_into_any_payload()),
+                                        metadata,
+                                    }),
+                            )*
+                            _ => Err(icu_provider::DataErrorKind::MissingDataKey.with_req(key, req)),
+                        }
                    }
                }
            }
@@ -624,7 +600,7 @@

        // For backwards compatibility
        self.write_to_file(
-            PathBuf::from("mod"),
+            PathBuf::from("mod.rs"),
            quote! {
                include!("macros.rs");
                #[clippy::msrv = "1.61"]
@@ -635,7 +611,7 @@

        // For backwards compatibility
        self.write_to_file(
-            PathBuf::from("any"),
+            PathBuf::from("any.rs"),
            quote! {
                // This assumes that `mod.rs` is already included.
                impl_any_provider!(BakedDataProvider);
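With the per-key `lookup_*` functions removed, the regenerated `load_any` (in the `impl_any_provider` hunk above) dispatches on the key hash and delegates to the typed `DataProvider::<M>::load`, erasing the payload type afterwards. A simplified model of that flow, with stand-in types in place of the `icu_provider` machinery:

// Simplified stand-ins modeling the new load_any flow:
// hash dispatch -> typed load -> type-erased payload.
use std::any::Any;

struct BakedDataProvider;

// Stand-in for <FooV1Marker as KeyedDataMarker>::KEY.hashed().
const FOO_V1_HASH: u64 = 0xF00;

impl BakedDataProvider {
    // Stand-in for the generated DataProvider::<FooV1Marker>::load.
    fn load_foo(&self) -> Result<&'static str, &'static str> {
        Ok("foo data")
    }

    // Stand-in for the generated AnyProvider::load_any.
    fn load_any(&self, key_hash: u64) -> Result<Box<dyn Any>, &'static str> {
        match key_hash {
            // Delegate to the typed load, then erase the payload type.
            FOO_V1_HASH => self.load_foo().map(|p| Box::new(p) as Box<dyn Any>),
            _ => Err("MissingDataKey"),
        }
    }
}

fn main() {
    let any = BakedDataProvider.load_any(FOO_V1_HASH).unwrap();
    assert_eq!(*any.downcast::<&str>().unwrap(), "foo data");
}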