diff --git a/Cargo.lock b/Cargo.lock index 45edf1832..623daf990 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -589,7 +589,7 @@ dependencies = [ [[package]] name = "bson" version = "3.0.0" -source = "git+https://github.com/mongodb/bson-rust?branch=main#69f82fb1c99fa8aa3be3dee22e1834efb7428c66" +source = "git+https://github.com/mongodb/bson-rust?branch=main#440bfeb54d68edaff4a86e57de30e2d0adcdb5fd" dependencies = [ "ahash", "base64 0.22.1", diff --git a/etc/gen_atlas_search/src/main.rs b/etc/gen_atlas_search/src/main.rs index 4a1346918..513abf3e7 100644 --- a/etc/gen_atlas_search/src/main.rs +++ b/etc/gen_atlas_search/src/main.rs @@ -1,3 +1,5 @@ +use std::path::Path; + use convert_case::{Case, Casing}; use proc_macro2::TokenStream; use quote::format_ident; @@ -39,31 +41,43 @@ impl Operator { fn gen_helper(&self) -> TokenStream { let name_text = &self.name; - let name_ident = format_ident!("{}", name_text.to_case(Case::Pascal)); - let constr_ident = format_ident!("{}", name_text.to_case(Case::Snake)); + let ident_base = match name_text.as_str() { + "in" => "searchIn", + _ => name_text, + }; + let name_ident = format_ident!("{}", ident_base.to_case(Case::Pascal)); + let constr_ident = format_ident!("{}", ident_base.to_case(Case::Snake)); let mut required_args = TokenStream::new(); + let mut required_arg_names = TokenStream::new(); let mut init_doc = TokenStream::new(); let mut setters = TokenStream::new(); for arg in &self.arguments { - let ident = format_ident!("{}", arg.name.to_case(Case::Snake)); - let rust_type = arg.rust_type(); + let ident = format_ident!( + "{}", + match arg.name.as_str() { + // `box` is a reserved word + "box" => "geo_box".to_owned(), + _ => arg.name.to_case(Case::Snake), + } + ); + let rust_type = arg.rust_type(&self.name); let type_ = rust_type.tokens(); let arg_name = &arg.name; let init_expr = rust_type.bson_expr(&ident); if arg.optional.unwrap_or(false) { - let tvars = rust_type.variables(); setters.push(parse_quote! 
{ #[allow(missing_docs)] - pub fn #ident<#tvars>(mut self, #ident: #type_) -> Self { - self.stage.insert(#arg_name, #init_expr); + pub fn #ident(mut self, #ident: #type_) -> Self { + self.spec.insert(#arg_name, #init_expr); self } }); } else { required_args.push(parse_quote! { #ident : #type_, }); + required_arg_names.push(parse_quote! { #ident, }); init_doc.push(parse_quote! { #arg_name : #init_expr, }); } } @@ -73,21 +87,27 @@ impl Operator { "For more details, see the [{name_text} operator reference]({}).", self.link ); + let struct_doc = format!( + "`{name_text}` Atlas Search operator. Construct with \ + [`{constr_ident}`]({constr_ident}())." + ); parse_quote! { - #[allow(missing_docs)] + #[doc = #struct_doc] pub struct #name_ident; - impl AtlasSearch<#name_ident> { - #[doc = #desc] - #[doc = ""] - #[doc = #link] - pub fn #constr_ident(#required_args) -> Self { - AtlasSearch { - name: #name_text, - stage: doc! { #init_doc }, - _t: PhantomData, - } - } + #[doc = #desc] + #[doc = ""] + #[doc = #link] + #[options_doc(#constr_ident, "into_stage")] + pub fn #constr_ident(#required_args) -> SearchOperator<#name_ident> { + SearchOperator::new( + #name_text, + doc! 
{ #init_doc }, + ) + } + + #[export_doc(#constr_ident)] + impl SearchOperator<#name_ident> { #setters } } @@ -107,80 +127,114 @@ struct Argument { #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] enum ArgumentType { - String, - Object, - SearchScore, - SearchPath, - SearchOperator, + Any, Array, + BinData, + Bool, + Date, + Geometry, Int, + Null, + Number, + Object, + ObjectId, + SearchOperator, + SearchPath, + SearchScore, + String, } -static QUERY: &str = "query"; -static TOKEN_ORDER: &str = "tokenOrder"; -static MATCH_CRITERIA: &str = "matchCriteria"; - impl Argument { - fn rust_type(&self) -> ArgumentRustType { - if self.name == QUERY { - return ArgumentRustType::StringOrArray; - } - if self.name == TOKEN_ORDER { - return ArgumentRustType::TokenOrder; - } - if self.name == MATCH_CRITERIA { - return ArgumentRustType::MatchCriteria; + fn rust_type(&self, operator: &str) -> ArgumentRustType { + match (operator, self.name.as_str()) { + ("autocomplete" | "text", "query") => return ArgumentRustType::StringOrArray, + ("autocomplete", "tokenOrder") => return ArgumentRustType::TokenOrder, + ("text", "matchCriteria") => return ArgumentRustType::MatchCriteria, + ("equals", "value") => return ArgumentRustType::IntoBson, + ("geoShape", "relation") => return ArgumentRustType::Relation, + ("range", "gt" | "gte" | "lt" | "lte") => return ArgumentRustType::RangeValue, + ("near", "origin") => return ArgumentRustType::NearOrigin, + _ => (), } + use ArgumentType::*; match self.type_.as_slice() { - [ArgumentType::String] => ArgumentRustType::String, - [ArgumentType::Object] => ArgumentRustType::Document, - [ArgumentType::SearchScore] => ArgumentRustType::Document, - [ArgumentType::SearchPath] => ArgumentRustType::StringOrArray, - [ArgumentType::SearchOperator, ArgumentType::Array] => ArgumentRustType::Operator, - [ArgumentType::Int] => ArgumentRustType::I32, + [String] => ArgumentRustType::String, + [Object] => ArgumentRustType::Document, + [SearchScore] => 
ArgumentRustType::Document, + [SearchPath] => ArgumentRustType::StringOrArray, + [SearchOperator] => ArgumentRustType::SearchOperator, + [SearchOperator, Array] => ArgumentRustType::SeachOperatorIter, + [Int] => ArgumentRustType::I32, + [Geometry] => ArgumentRustType::Document, + [Any, Array] => ArgumentRustType::IntoBson, + [Object, Array] => ArgumentRustType::DocumentOrArray, + [Number] => ArgumentRustType::BsonNumber, + [String, Array] => ArgumentRustType::StringOrArray, + [Bool] => ArgumentRustType::Bool, _ => panic!("Unexpected argument types: {:?}", self.type_), } } } enum ArgumentRustType { - String, + Bool, + BsonNumber, Document, + DocumentOrArray, + I32, + IntoBson, + MatchCriteria, + NearOrigin, + RangeValue, + Relation, + SearchOperator, + SeachOperatorIter, + String, StringOrArray, TokenOrder, - MatchCriteria, - Operator, - I32, } impl ArgumentRustType { fn tokens(&self) -> syn::Type { match self { - Self::String => parse_quote! { impl AsRef }, + Self::Bool => parse_quote! { bool }, + Self::BsonNumber => parse_quote! { impl BsonNumber }, Self::Document => parse_quote! { Document }, + Self::DocumentOrArray => parse_quote! { impl DocumentOrArray }, + Self::I32 => parse_quote! { i32 }, + Self::IntoBson => parse_quote! { impl Into }, + Self::MatchCriteria => parse_quote! { MatchCriteria }, + Self::NearOrigin => parse_quote! { impl NearOrigin }, + Self::RangeValue => parse_quote! { impl RangeValue }, + Self::Relation => parse_quote! { Relation }, + Self::SearchOperator => parse_quote! { impl SearchOperatorParam }, + Self::SeachOperatorIter => { + parse_quote! { impl IntoIterator } + } + Self::String => parse_quote! { impl AsRef }, Self::StringOrArray => parse_quote! { impl StringOrArray }, Self::TokenOrder => parse_quote! { TokenOrder }, - Self::MatchCriteria => parse_quote! { MatchCriteria }, - Self::Operator => parse_quote! { impl IntoIterator> }, - Self::I32 => parse_quote! 
{ i32 }, - } - } - - fn variables(&self) -> TokenStream { - match self { - Self::Operator => parse_quote! { T }, - _ => parse_quote! {}, } } fn bson_expr(&self, ident: &syn::Ident) -> syn::Expr { match self { + Self::Document | Self::I32 | Self::Bool => parse_quote! { #ident }, + Self::IntoBson => parse_quote! { #ident.into() }, + Self::SeachOperatorIter => { + parse_quote! { #ident.into_iter().map(|o| o.to_bson()).collect::>() } + } Self::String => parse_quote! { #ident.as_ref() }, - Self::StringOrArray => parse_quote! { #ident.to_bson() }, - Self::TokenOrder | Self::MatchCriteria => parse_quote! { #ident.name() }, - Self::Document | Self::I32 => parse_quote! { #ident }, - Self::Operator => { - parse_quote! { #ident.into_iter().map(Document::from).collect::>() } + Self::StringOrArray + | Self::DocumentOrArray + | Self::SearchOperator + | Self::NearOrigin + | Self::RangeValue + | Self::BsonNumber => { + parse_quote! { #ident.to_bson() } + } + Self::TokenOrder | Self::MatchCriteria | Self::Relation => { + parse_quote! { #ident.name() } } } } @@ -200,11 +254,14 @@ impl TokenStreamExt for TokenStream { fn main() { let mut operators = TokenStream::new(); - for path in [ - "yaml/search/autocomplete.yaml", - "yaml/search/text.yaml", - "yaml/search/compound.yaml", - ] { + let mut paths = Path::new("yaml/search") + .read_dir() + .unwrap() + .map(|e| e.unwrap().path()) + .filter(|p| p.extension().is_some_and(|e| e == "yaml")) + .collect::>(); + paths.sort(); + for path in paths { let contents = std::fs::read_to_string(path).unwrap(); let parsed = serde_yaml::from_str::(&contents) .unwrap() @@ -215,6 +272,7 @@ fn main() { let file = parse_quote! { //! This file was autogenerated. Do not manually edit. 
use super::*; + use mongodb_internal_macros::{export_doc, options_doc}; #operators }; diff --git a/macros/.gitignore b/macros/.gitignore index b83d22266..ca98cd96e 100644 --- a/macros/.gitignore +++ b/macros/.gitignore @@ -1 +1,2 @@ /target/ +Cargo.lock diff --git a/macros/src/lib.rs b/macros/src/lib.rs index df8299391..293ffa510 100644 --- a/macros/src/lib.rs +++ b/macros/src/lib.rs @@ -30,6 +30,9 @@ pub fn deeplink( crate::rustdoc::deeplink(attr, item) } +/// Generate setters for the given options struct. +/// Arguments: +/// * the fully-qualified path of the struct type #[import_tokens_attr] #[with_custom_parsing(crate::option::OptionSettersArgs)] #[proc_macro_attribute] @@ -40,6 +43,10 @@ pub fn option_setters( crate::option::option_setters(attr, item, __custom_tokens) } +/// Export the setters in this `impl` block so they can be used in `options_doc`. +/// Arguments: +/// * an identifier for the exported list +/// * an optional `extra = [fn_name[,..]]` list of additional setters to include #[proc_macro_attribute] pub fn export_doc( attr: proc_macro::TokenStream, @@ -48,6 +55,10 @@ pub fn export_doc( crate::rustdoc::export_doc(attr, item) } +/// Include options documentation generated by `export_doc` in the rustdoc for this method: +/// Arguments: +/// * the doc identifier given to `export_doc` +/// * an optional string literal naming the method that terminates the call chain (e.g. "run"); defaults to ".await" if omitted #[import_tokens_attr] #[with_custom_parsing(crate::rustdoc::OptionsDocArgs)] #[proc_macro_attribute] diff --git a/macros/src/rustdoc.rs b/macros/src/rustdoc.rs index 034b97849..7f87e2ff0 100644 --- a/macros/src/rustdoc.rs +++ b/macros/src/rustdoc.rs @@ -153,7 +153,10 @@ pub(crate) fn options_doc( }); let preamble = format!( "These methods can be chained before `{}` to set options:", - if args.is_async() { ".await" } else { "run" } + args.term + .as_ref() + .map(|(_, b)| b.as_str()) + .unwrap_or(".await") ); impl_fn.attrs.push(parse_quote!
{ #[doc = #preamble] @@ -169,34 +172,29 @@ pub(crate) fn options_doc( pub(crate) struct OptionsDocArgs { foreign_path: syn::Path, - sync: Option<(Token![,], Ident)>, -} - -impl OptionsDocArgs { - fn is_async(&self) -> bool { - self.sync.is_none() - } + term: Option<(Token![,], String)>, } impl Parse for OptionsDocArgs { fn parse(input: ParseStream) -> syn::Result { let foreign_path = input.parse()?; - let sync = if input.is_empty() { + let term = if input.is_empty() { None } else { - Some((input.parse()?, parse_name(input, "sync")?)) + let (comma, lit) = (input.parse()?, input.parse::()?); + Some((comma, lit.value())) }; - Ok(Self { foreign_path, sync }) + Ok(Self { foreign_path, term }) } } impl ToTokens for OptionsDocArgs { fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { tokens.extend(self.foreign_path.to_token_stream()); - if let Some((comma, ident)) = &self.sync { + if let Some((comma, lit)) = &self.term { tokens.extend(comma.to_token_stream()); - tokens.extend(ident.to_token_stream()); + tokens.extend(lit.to_token_stream()); } } } diff --git a/src/action/aggregate.rs b/src/action/aggregate.rs index deca70703..ca12f5a67 100644 --- a/src/action/aggregate.rs +++ b/src/action/aggregate.rs @@ -40,13 +40,10 @@ impl Database { #[deeplink] #[options_doc(aggregate)] pub fn aggregate(&self, pipeline: impl IntoIterator) -> Aggregate { - Aggregate { - target: AggregateTargetRef::Database(self), - pipeline: pipeline.into_iter().collect(), - options: None, - session: ImplicitSession, - _phantom: PhantomData, - } + Aggregate::new( + AggregateTargetRef::Database(self), + pipeline.into_iter().collect(), + ) } } @@ -65,13 +62,10 @@ where #[deeplink] #[options_doc(aggregate)] pub fn aggregate(&self, pipeline: impl IntoIterator) -> Aggregate { - Aggregate { - target: AggregateTargetRef::Collection(CollRef::new(self)), - pipeline: pipeline.into_iter().collect(), - options: None, - session: ImplicitSession, - _phantom: PhantomData, - } + Aggregate::new( + 
AggregateTargetRef::Collection(CollRef::new(self)), + pipeline.into_iter().collect(), + ) } } @@ -82,12 +76,12 @@ impl crate::sync::Database { /// See the documentation [here](https://www.mongodb.com/docs/manual/aggregation/) for more /// information on aggregations. /// - /// [`run`](Aggregate::run) will return d[Result>`]. If a + /// [`run`](Aggregate::run) will return d[`Result>`]. If a /// [`crate::sync::ClientSession`] was provided, the returned cursor will be a /// [`crate::sync::SessionCursor`]. If [`with_type`](Aggregate::with_type) was called, the /// returned cursor will be generic over the `T` specified. #[deeplink] - #[options_doc(aggregate, sync)] + #[options_doc(aggregate, "run")] pub fn aggregate(&self, pipeline: impl IntoIterator) -> Aggregate { self.async_database.aggregate(pipeline) } @@ -103,12 +97,12 @@ where /// See the documentation [here](https://www.mongodb.com/docs/manual/aggregation/) for more /// information on aggregations. /// - /// [`run`](Aggregate::run) will return d[Result>`]. If a + /// [`run`](Aggregate::run) will return d[`Result>`]. If a /// `crate::sync::ClientSession` was provided, the returned cursor will be a /// `crate::sync::SessionCursor`. If [`with_type`](Aggregate::with_type) was called, the /// returned cursor will be generic over the `T` specified. 
#[deeplink] - #[options_doc(aggregate, sync)] + #[options_doc(aggregate, "run")] pub fn aggregate(&self, pipeline: impl IntoIterator) -> Aggregate { self.async_collection.aggregate(pipeline) } @@ -125,6 +119,18 @@ pub struct Aggregate<'a, Session = ImplicitSession, T = Document> { _phantom: PhantomData, } +impl<'a> Aggregate<'a> { + fn new(target: AggregateTargetRef<'a>, pipeline: Vec) -> Self { + Self { + target, + pipeline, + options: None, + session: ImplicitSession, + _phantom: PhantomData, + } + } +} + #[option_setters(crate::coll::options::AggregateOptions)] #[export_doc(aggregate, extra = [session])] impl<'a, Session, T> Aggregate<'a, Session, T> { diff --git a/src/action/bulk_write.rs b/src/action/bulk_write.rs index 04b85530d..1a0ae3d77 100644 --- a/src/action/bulk_write.rs +++ b/src/action/bulk_write.rs @@ -48,13 +48,13 @@ impl crate::sync::Client { /// [here](https://www.mongodb.com/docs/manual/core/retryable-writes/) for more information on /// retryable writes. /// - /// [`run`](BulkWrite::run) will return d[`Result`] or + /// d[`Result`] if [`verbose_results`](BulkWrite::verbose_results) is /// configured. /// /// Bulk write is only available on MongoDB 8.0+. #[deeplink] - #[options_doc(bulk_write, sync)] + #[options_doc(bulk_write, "run")] pub fn bulk_write( &self, models: impl IntoIterator>, diff --git a/src/action/count.rs b/src/action/count.rs index c99aeb26a..587b4b267 100644 --- a/src/action/count.rs +++ b/src/action/count.rs @@ -78,7 +78,7 @@ where /// /// [`run`](EstimatedDocumentCount::run) will return d[`Result`]. #[deeplink] - #[options_doc(estimated_doc_count, sync)] + #[options_doc(estimated_doc_count, "run")] pub fn estimated_document_count(&self) -> EstimatedDocumentCount { self.async_collection.estimated_document_count() } @@ -89,7 +89,7 @@ where /// /// [`run`](CountDocuments::run) will return d[`Result`]. 
#[deeplink] - #[options_doc(count_docs, sync)] + #[options_doc(count_docs, "run")] pub fn count_documents(&self, filter: Document) -> CountDocuments { self.async_collection.count_documents(filter) } diff --git a/src/action/create_collection.rs b/src/action/create_collection.rs index 2aa1cec55..41f777999 100644 --- a/src/action/create_collection.rs +++ b/src/action/create_collection.rs @@ -47,7 +47,7 @@ impl crate::sync::Database { /// /// [`run`](CreateCollection::run) will return d[`Result<()>`]. #[deeplink] - #[options_doc(create_coll, sync)] + #[options_doc(create_coll, "run")] pub fn create_collection(&self, name: impl Into) -> CreateCollection { self.async_database.create_collection(name) } diff --git a/src/action/create_index.rs b/src/action/create_index.rs index 75dc2ba8d..bd420acf8 100644 --- a/src/action/create_index.rs +++ b/src/action/create_index.rs @@ -71,7 +71,7 @@ where /// /// [`run`](CreateIndex::run) will return d[`Result`]. #[deeplink] - #[options_doc(create_index, sync)] + #[options_doc(create_index, "run")] pub fn create_index(&self, index: IndexModel) -> CreateIndex { self.async_collection.create_index(index) } @@ -80,7 +80,7 @@ where /// /// [`run`](CreateIndex::run) will return d[`Result`]. #[deeplink] - #[options_doc(create_index, sync)] + #[options_doc(create_index, "run")] pub fn create_indexes( &self, indexes: impl IntoIterator, diff --git a/src/action/delete.rs b/src/action/delete.rs index 4bf2dd78a..d94532314 100644 --- a/src/action/delete.rs +++ b/src/action/delete.rs @@ -67,7 +67,7 @@ where /// /// [`run`](Delete::run) will return d[`Result`]. #[deeplink] - #[options_doc(delete, sync)] + #[options_doc(delete, "run")] pub fn delete_one(&self, query: Document) -> Delete { self.async_collection.delete_one(query) } @@ -76,7 +76,7 @@ where /// /// [`run`](Delete::run) will return d[`Result`]. 
#[deeplink] - #[options_doc(delete, sync)] + #[options_doc(delete, "run")] pub fn delete_many(&self, query: Document) -> Delete { self.async_collection.delete_many(query) } diff --git a/src/action/distinct.rs b/src/action/distinct.rs index 447238f7e..f245f0afd 100644 --- a/src/action/distinct.rs +++ b/src/action/distinct.rs @@ -44,7 +44,7 @@ where /// /// [`run`](Distinct::run) will return d[`Result>`]. #[deeplink] - #[options_doc(distinct, sync)] + #[options_doc(distinct, "run")] pub fn distinct(&self, field_name: impl AsRef, filter: Document) -> Distinct { self.async_collection.distinct(field_name, filter) } diff --git a/src/action/drop.rs b/src/action/drop.rs index 3571a53ed..374dd084d 100644 --- a/src/action/drop.rs +++ b/src/action/drop.rs @@ -35,7 +35,7 @@ impl crate::sync::Database { /// /// [`run`](DropDatabase::run) will return d[`Result<()>`]. #[deeplink] - #[options_doc(drop_db, sync)] + #[options_doc(drop_db, "run")] pub fn drop(&self) -> DropDatabase { self.async_database.drop() } @@ -98,7 +98,7 @@ where /// /// [`run`](DropCollection::run) will return d[`Result<()>`]. #[deeplink] - #[options_doc(drop_coll, sync)] + #[options_doc(drop_coll, "run")] pub fn drop(&self) -> DropCollection { self.async_collection.drop() } diff --git a/src/action/drop_index.rs b/src/action/drop_index.rs index e729d3aba..2ec816ce4 100644 --- a/src/action/drop_index.rs +++ b/src/action/drop_index.rs @@ -55,7 +55,7 @@ where /// /// [`run`](DropIndex::run) will return d[`Result<()>`]. #[deeplink] - #[options_doc(drop_index, sync)] + #[options_doc(drop_index, "run")] pub fn drop_index(&self, name: impl AsRef) -> DropIndex { self.async_collection.drop_index(name) } @@ -64,7 +64,7 @@ where /// /// [`run`](DropIndex::run) will return d[`Result<()>`]. 
#[deeplink] - #[options_doc(drop_index, sync)] + #[options_doc(drop_index, "run")] pub fn drop_indexes(&self) -> DropIndex { self.async_collection.drop_indexes() } diff --git a/src/action/find.rs b/src/action/find.rs index 7730e25e2..121e9d00c 100644 --- a/src/action/find.rs +++ b/src/action/find.rs @@ -66,7 +66,7 @@ impl crate::sync::Collection { /// [`run`](Find::run) will return d[`Result>`] (or /// d[`Result>`] if a session is provided). #[deeplink] - #[options_doc(find, sync)] + #[options_doc(find, "run")] pub fn find(&self, filter: Document) -> Find<'_, T> { self.async_collection.find(filter) } @@ -78,7 +78,7 @@ impl crate::sync::Collection { /// /// [`run`](FindOne::run) will return d[`Result>`]. #[deeplink] - #[options_doc(find_one, sync)] + #[options_doc(find_one, "run")] pub fn find_one(&self, filter: Document) -> FindOne<'_, T> { self.async_collection.find_one(filter) } diff --git a/src/action/find_and_modify.rs b/src/action/find_and_modify.rs index e8025fd87..0919f86e5 100644 --- a/src/action/find_and_modify.rs +++ b/src/action/find_and_modify.rs @@ -125,7 +125,7 @@ impl crate::sync::Collection { /// /// [`run`](FindOneAndDelete::run) will return d[`Result>`]. #[deeplink] - #[options_doc(find_one_and_delete, sync)] + #[options_doc(find_one_and_delete, "run")] pub fn find_one_and_delete(&self, filter: Document) -> FindOneAndDelete<'_, T> { self.async_collection.find_one_and_delete(filter) } @@ -141,7 +141,7 @@ impl crate::sync::Collection { /// /// [`run`](FindOneAndDelete::run) will return d[`Result>`]. #[deeplink] - #[options_doc(find_one_and_update, sync)] + #[options_doc(find_one_and_update, "run")] pub fn find_one_and_update( &self, filter: Document, @@ -163,7 +163,7 @@ impl crate::sync::Collection { /// /// [`run`](FindOneAndReplace::run) will return d[`Result>`]. 
#[deeplink] - #[options_doc(find_one_and_replace, sync)] + #[options_doc(find_one_and_replace, "run")] pub fn find_one_and_replace( &self, filter: Document, diff --git a/src/action/gridfs/download.rs b/src/action/gridfs/download.rs index 888608e80..d931fb6c6 100644 --- a/src/action/gridfs/download.rs +++ b/src/action/gridfs/download.rs @@ -115,7 +115,7 @@ impl crate::sync::gridfs::GridFsBucket { /// /// [`run`](OpenDownloadStreamByName::run) will return d[`Result`]. #[deeplink] - #[options_doc(download_by_name, sync)] + #[options_doc(download_by_name, "run")] pub fn open_download_stream_by_name( &self, filename: impl Into, diff --git a/src/action/gridfs/find.rs b/src/action/gridfs/find.rs index df96b9787..afb663457 100644 --- a/src/action/gridfs/find.rs +++ b/src/action/gridfs/find.rs @@ -47,7 +47,7 @@ impl crate::sync::gridfs::GridFsBucket { /// /// [`run`](Find::run) will return d[`Result>`]. #[deeplink] - #[options_doc(find, sync)] + #[options_doc(find, "run")] pub fn find(&self, filter: Document) -> Find { self.async_bucket.find(filter) } @@ -57,7 +57,7 @@ impl crate::sync::gridfs::GridFsBucket { /// /// [`run`](FindOne::run) will return d[`Result>`]. #[deeplink] - #[options_doc(find_one, sync)] + #[options_doc(find_one, "run")] pub fn find_one(&self, filter: Document) -> FindOne { self.async_bucket.find_one(filter) } diff --git a/src/action/gridfs/upload.rs b/src/action/gridfs/upload.rs index f313c455b..fdfbbfe8b 100644 --- a/src/action/gridfs/upload.rs +++ b/src/action/gridfs/upload.rs @@ -32,7 +32,7 @@ impl crate::sync::gridfs::GridFsBucket { /// /// [`run`](OpenUploadStream::run) will return d[`Result`]. 
#[deeplink] - #[options_doc(open_upload_stream, sync)] + #[options_doc(open_upload_stream, "run")] pub fn open_upload_stream(&self, filename: impl AsRef) -> OpenUploadStream { self.async_bucket.open_upload_stream(filename) } diff --git a/src/action/insert_many.rs b/src/action/insert_many.rs index 62754ba4e..6bb4b8e7d 100644 --- a/src/action/insert_many.rs +++ b/src/action/insert_many.rs @@ -59,7 +59,7 @@ impl crate::sync::Collection { /// /// [`run`](InsertMany::run) will return d[`Result`]. #[deeplink] - #[options_doc(insert_many, sync)] + #[options_doc(insert_many, "run")] pub fn insert_many(&self, docs: impl IntoIterator>) -> InsertMany { self.async_collection.insert_many(docs) } diff --git a/src/action/insert_one.rs b/src/action/insert_one.rs index 997ab0e49..7699f1636 100644 --- a/src/action/insert_one.rs +++ b/src/action/insert_one.rs @@ -54,7 +54,7 @@ impl crate::sync::Collection { /// /// [`run`](InsertOne::run) will return d[`Result`]. #[deeplink] - #[options_doc(insert_one, sync)] + #[options_doc(insert_one, "run")] pub fn insert_one(&self, doc: impl Borrow) -> InsertOne { self.async_collection.insert_one(doc) } diff --git a/src/action/list_collections.rs b/src/action/list_collections.rs index 21f261fb5..60fcffa4a 100644 --- a/src/action/list_collections.rs +++ b/src/action/list_collections.rs @@ -63,7 +63,7 @@ impl crate::sync::Database { /// [`run`](ListCollections::run) will return /// d[`Result>`]. #[deeplink] - #[options_doc(list_collections, sync)] + #[options_doc(list_collections, "run")] pub fn list_collections(&self) -> ListCollections { self.async_database.list_collections() } @@ -72,7 +72,7 @@ impl crate::sync::Database { /// /// [`run`](ListCollections::run) will return d[`Result>`]. 
#[deeplink] - #[options_doc(list_collections, sync)] + #[options_doc(list_collections, "run")] pub fn list_collection_names(&self) -> ListCollections<'_, ListNames> { self.async_database.list_collection_names() } diff --git a/src/action/list_databases.rs b/src/action/list_databases.rs index 2d2c59dd1..79838c811 100644 --- a/src/action/list_databases.rs +++ b/src/action/list_databases.rs @@ -59,7 +59,7 @@ impl SyncClient { /// /// [run](ListDatabases::run) will return d[`Result>`]. #[deeplink] - #[options_doc(list_databases, sync)] + #[options_doc(list_databases, "run")] pub fn list_databases(&self) -> ListDatabases { self.async_client.list_databases() } @@ -68,7 +68,7 @@ impl SyncClient { /// /// [run](ListDatabases::run) will return d[`Result>`]. #[deeplink] - #[options_doc(list_databases, sync)] + #[options_doc(list_databases, "run")] pub fn list_database_names(&self) -> ListDatabases<'_, ListNames> { self.async_client.list_database_names() } diff --git a/src/action/list_indexes.rs b/src/action/list_indexes.rs index acbf4bbde..51808722f 100644 --- a/src/action/list_indexes.rs +++ b/src/action/list_indexes.rs @@ -71,7 +71,7 @@ where /// [`run`](ListIndexes::run) will return d[`Result>`] (or /// d[`Result>`] if a `ClientSession` is provided). #[deeplink] - #[options_doc(list_indexes, sync)] + #[options_doc(list_indexes, "run")] pub fn list_indexes(&self) -> ListIndexes { self.async_collection.list_indexes() } @@ -80,7 +80,7 @@ where /// /// [`run`](ListIndexes::run) will return d[`Result>`]. #[deeplink] - #[options_doc(list_indexes, sync)] + #[options_doc(list_indexes, "run")] pub fn list_index_names(&self) -> ListIndexes { self.async_collection.list_index_names() } diff --git a/src/action/replace_one.rs b/src/action/replace_one.rs index af3d31392..8504cfa16 100644 --- a/src/action/replace_one.rs +++ b/src/action/replace_one.rs @@ -50,7 +50,7 @@ impl crate::sync::Collection { /// /// [`run`](ReplaceOne::run) will return d[`Result`]. 
#[deeplink] - #[options_doc(replace_one, sync)] + #[options_doc(replace_one, "run")] pub fn replace_one(&self, query: Document, replacement: impl Borrow) -> ReplaceOne { self.async_collection.replace_one(query, replacement) } diff --git a/src/action/run_command.rs b/src/action/run_command.rs index 4d2353599..cc8c85b7f 100644 --- a/src/action/run_command.rs +++ b/src/action/run_command.rs @@ -110,7 +110,7 @@ impl crate::sync::Database { /// /// [`run`](RunCommand::run) will return d[`Result`]. #[deeplink] - #[options_doc(run_command, sync)] + #[options_doc(run_command, "run")] pub fn run_command(&self, command: Document) -> RunCommand { self.async_database.run_command(command) } @@ -125,7 +125,7 @@ impl crate::sync::Database { /// /// [`run`](RunCommand::run) will return d[`Result`]. #[deeplink] - #[options_doc(run_command, sync)] + #[options_doc(run_command, "run")] pub fn run_raw_command(&self, command: RawDocumentBuf) -> RunCommand { self.async_database.run_raw_command(command) } @@ -135,7 +135,7 @@ impl crate::sync::Database { /// [`run`](RunCursorCommand::run) will return d[`Result>`] or a /// d[`Result>`] if a [`ClientSession`] is provided. #[deeplink] - #[options_doc(run_cursor_command, sync)] + #[options_doc(run_cursor_command, "run")] pub fn run_cursor_command(&self, command: Document) -> RunCursorCommand { self.async_database.run_cursor_command(command) } @@ -145,7 +145,7 @@ impl crate::sync::Database { /// [`run`](RunCursorCommand::run) will return d[`Result>`] or a /// d[`Result>`] if a [`ClientSession`] is provided. 
#[deeplink] - #[options_doc(run_cursor_command, sync)] + #[options_doc(run_cursor_command, "run")] pub fn run_raw_cursor_command(&self, command: RawDocumentBuf) -> RunCursorCommand { self.async_database.run_raw_cursor_command(command) } diff --git a/src/action/search_index.rs b/src/action/search_index.rs index 37a5526a5..b1fe5436a 100644 --- a/src/action/search_index.rs +++ b/src/action/search_index.rs @@ -118,7 +118,7 @@ where /// /// [`run`](CreateSearchIndex::run) will return d[`Result>`]. #[deeplink] - #[options_doc(create_search_index, sync)] + #[options_doc(create_search_index, "run")] pub fn create_search_indexes( &self, models: impl IntoIterator, @@ -130,7 +130,7 @@ where /// /// [`run`](CreateSearchIndex::run) will return d[`Result`]. #[deeplink] - #[options_doc(create_search_index, sync)] + #[options_doc(create_search_index, "run")] pub fn create_search_index(&self, model: SearchIndexModel) -> CreateSearchIndex { self.async_collection.create_search_index(model) } @@ -138,7 +138,7 @@ where /// Updates the search index with the given name to use the provided definition. /// /// [`run`](UpdateSearchIndex::run) will return [`Result<()>`]. - #[options_doc(update_search_index, sync)] + #[options_doc(update_search_index, "run")] pub fn update_search_index( &self, name: impl Into, @@ -150,7 +150,7 @@ where /// Drops the search index with the given name. /// /// [`run`](DropSearchIndex::run) will return [`Result<()>`]. - #[options_doc(drop_search_index, sync)] + #[options_doc(drop_search_index, "run")] pub fn drop_search_index(&self, name: impl Into) -> DropSearchIndex { self.async_collection.drop_search_index(name) } @@ -162,7 +162,7 @@ where /// /// [`run`](ListSearchIndexes::run) will return d[`Result>`]. 
#[deeplink] - #[options_doc(list_search_indexes, sync)] + #[options_doc(list_search_indexes, "run")] pub fn list_search_indexes(&self) -> ListSearchIndexes { self.async_collection.list_search_indexes() } diff --git a/src/action/session.rs b/src/action/session.rs index 6b4d8fb91..ad16f0fac 100644 --- a/src/action/session.rs +++ b/src/action/session.rs @@ -27,7 +27,7 @@ impl crate::sync::Client { /// /// [run](StartSession::run) will return d[`Result`]. #[deeplink] - #[options_doc(start_session, sync)] + #[options_doc(start_session, "run")] pub fn start_session(&self) -> StartSession { self.async_client.start_session() } diff --git a/src/action/shutdown.rs b/src/action/shutdown.rs index f8336c131..2b6ff7095 100644 --- a/src/action/shutdown.rs +++ b/src/action/shutdown.rs @@ -120,7 +120,7 @@ impl crate::sync::Client { /// `GridFsUploadStream`. /// /// [`run`](Shutdown::run) will return `()`. - #[options_doc(shutdown, sync)] + #[options_doc(shutdown, "run")] pub fn shutdown(self) -> Shutdown { self.async_client.shutdown() } diff --git a/src/action/transaction.rs b/src/action/transaction.rs index 306e242d6..ea5242622 100644 --- a/src/action/transaction.rs +++ b/src/action/transaction.rs @@ -132,7 +132,7 @@ impl crate::sync::ClientSession { /// ``` /// /// [`run`](StartTransaction::run) will return [`Result<()>`]. - #[options_doc(start_transaction, sync)] + #[options_doc(start_transaction, "run")] pub fn start_transaction(&mut self) -> StartTransaction<&mut Self> { StartTransaction { session: self, diff --git a/src/action/update.rs b/src/action/update.rs index de7deb5bf..d0507568d 100644 --- a/src/action/update.rs +++ b/src/action/update.rs @@ -76,7 +76,7 @@ where /// /// [`run`](Update::run) will return d[`Result`]. 
#[deeplink] - #[options_doc(update, sync)] + #[options_doc(update, "run")] pub fn update_many(&self, query: Document, update: impl Into) -> Update { self.async_collection.update_many(query, update) } @@ -94,7 +94,7 @@ where /// /// [`run`](Update::run) will return d[`Result`]. #[deeplink] - #[options_doc(update, sync)] + #[options_doc(update, "run")] pub fn update_one(&self, query: Document, update: impl Into) -> Update { self.async_collection.update_one(query, update) } diff --git a/src/action/watch.rs b/src/action/watch.rs index b3044a286..df9cea910 100644 --- a/src/action/watch.rs +++ b/src/action/watch.rs @@ -127,7 +127,7 @@ impl crate::sync::Client { /// /// Change streams require either a "majority" read concern or no read /// concern. Anything else will cause a server error. - #[options_doc(watch, sync)] + #[options_doc(watch, "run")] pub fn watch(&self) -> Watch { self.async_client.watch() } @@ -144,7 +144,7 @@ impl crate::sync::Database { /// /// Change streams require either a "majority" read concern or no read /// concern. Anything else will cause a server error. - #[options_doc(watch, sync)] + #[options_doc(watch, "run")] pub fn watch(&self) -> Watch { self.async_database.watch() } @@ -165,7 +165,7 @@ where /// /// Change streams require either a "majority" read concern or no read concern. Anything else /// will cause a server error. - #[options_doc(watch, sync)] + #[options_doc(watch, "run")] pub fn watch(&self) -> Watch { self.async_collection.watch() } diff --git a/src/atlas_search.rs b/src/atlas_search.rs index c82645190..f0f6fd0a0 100644 --- a/src/atlas_search.rs +++ b/src/atlas_search.rs @@ -1,96 +1,258 @@ -//! Helpers for building Atlas Search aggregation pipelines. - +//! Helpers for building Atlas Search aggregation pipelines. Use one of the constructor functions +//! and chain optional value setters, and then convert to a pipeline stage [`Document`] via +//! [`into_stage`](SearchOperator::into_stage). +//! +//! ```no_run +//! 
# async fn wrapper() -> mongodb::error::Result<()> { +//! # use mongodb::{Collection, bson::{Document, doc}}; +//! # let collection: Collection = todo!(); +//! use mongodb::atlas_search; +//! let cursor = collection.aggregate(vec![ +//! atlas_search::autocomplete("title", "pre") +//! .fuzzy(doc! { "maxEdits": 1, "prefixLength": 1, "maxExpansions": 256 }) +//! .into_stage(), +//! doc! { +//! "$limit": 10, +//! }, +//! doc! { +//! "$project": { +//! "_id": 0, +//! "title": 1, +//! } +//! }, +//! ]).await?; +//! # Ok(()) +//! # } mod gen; pub use gen::*; use std::marker::PhantomData; -use crate::bson::{doc, Bson, Document}; +use crate::bson::{doc, Bson, DateTime, Document}; +use mongodb_internal_macros::{export_doc, options_doc}; -/// A helper to build the aggregation stage for Atlas Search. Use one of the constructor functions -/// and chain optional value setters, and then convert to a pipeline stage [`Document`] via -/// [`into`](Into::into) or [`on_index`](AtlasSearch::on_index). -/// -/// ```no_run -/// # async fn wrapper() -> mongodb::error::Result<()> { -/// # use mongodb::{Collection, atlas_search::AtlasSearch, bson::{Document, doc}}; -/// # let collection: Collection = todo!(); -/// let cursor = collection.aggregate(vec![ -/// AtlasSearch::autocomplete("title", "pre") -/// .fuzzy(doc! { "maxEdits": 1, "prefixLength": 1, "maxExpansions": 256 }) -/// .into(), -/// doc! { -/// "$limit": 10, -/// }, -/// doc! { -/// "$project": { -/// "_id": 0, -/// "title": 1, -/// } -/// }, -/// ]).await?; -/// # Ok(()) -/// # } -pub struct AtlasSearch { - name: &'static str, - stage: Document, +/// A helper to build the aggregation stage for Atlas Search. +pub struct SearchOperator { + pub(crate) name: &'static str, + pub(crate) spec: Document, _t: PhantomData, } -impl From> for Document { - fn from(value: AtlasSearch) -> Self { - doc! 
{ - "$search": { - value.name: value.stage - } +impl SearchOperator { + fn new(name: &'static str, spec: Document) -> Self { + Self { + name, + spec, + _t: PhantomData, } } -} -impl AtlasSearch { + /// Finalize this search operator as a `$search` aggregation stage document. + pub fn into_stage(self) -> Document { + search(self).into_stage() + } + + /// Finalize this search operator as a `$searchMeta` aggregation stage document. + pub fn into_stage_meta(self) -> Document { + search_meta(self).into_stage() + } + /// Erase the type of this builder. Not typically needed, but can be useful to include builders /// of different types in a single `Vec`: /// ```no_run /// # async fn wrapper() -> mongodb::error::Result<()> { - /// # use mongodb::{Collection, atlas_search::AtlasSearch, bson::{Document, doc}}; + /// # use mongodb::{Collection, bson::{Document, doc}}; /// # let collection: Collection = todo!(); + /// use mongodb::atlas_search; /// let cursor = collection.aggregate(vec![ - /// AtlasSearch::compound() + /// atlas_search::compound() /// .must(vec![ - /// AtlasSearch::text("description", "varieties").unit(), - /// AtlasSearch::compound() - /// .should(AtlasSearch::text("description", "Fuji")) + /// atlas_search::text("description", "varieties").unit(), + /// atlas_search::compound() + /// .should(atlas_search::text("description", "Fuji")) /// .unit(), /// ]) - /// .into(), + /// .into_stage(), /// ]).await?; /// # } /// ``` - pub fn unit(self) -> AtlasSearch<()> { - AtlasSearch { + pub fn unit(self) -> SearchOperator<()> { + SearchOperator { name: self.name, - stage: self.stage, + spec: self.spec, _t: PhantomData, } } +} - /// Like [`into`](Into::into), converts this builder into an aggregate pipeline stage - /// [`Document`], but also specify the search index to use. - pub fn on_index(self, index: impl AsRef) -> Document { - doc! 
{ - "$search": { - "index": index.as_ref(), - self.name: self.stage, - } - } +/// Finalize a search operator as a pending `$search` aggregation stage, allowing +/// options to be set. +/// ```no_run +/// # async fn wrapper() -> mongodb::error::Result<()> { +/// # use mongodb::{Collection, bson::{Document, doc}}; +/// # let collection: Collection = todo!(); +/// use mongodb::atlas_search::{autocomplete, search}; +/// let cursor = collection.aggregate(vec![ +/// search( +/// autocomplete("title", "pre") +/// .fuzzy(doc! { "maxEdits": 1, "prefixLength": 1, "maxExpansions": 256 }) +/// ) +/// .index("movies") +/// .into_stage(), +/// doc! { +/// "$limit": 10, +/// }, +/// doc! { +/// "$project": { +/// "_id": 0, +/// "title": 1, +/// } +/// }, +/// ]).await?; +/// # Ok(()) +/// # } +/// ``` +#[options_doc(atlas_search, "into_stage")] +pub fn search(op: SearchOperator) -> AtlasSearch { + AtlasSearch { + stage: doc! { op.name: op.spec }, + } +} + +/// A pending `$search` aggregation stage. Construct with [`search`]. +pub struct AtlasSearch { + stage: Document, +} + +#[export_doc(atlas_search)] +impl AtlasSearch { + /// Parallelize search across segments on dedicated search nodes. + pub fn concurrent(mut self, value: bool) -> Self { + self.stage.insert("concurrent", value); + self + } + + /// Document that specifies the count options for retrieving a count of the results. + pub fn count(mut self, value: Document) -> Self { + self.stage.insert("count", value); + self + } + + /// Document that specifies the highlighting options for displaying search terms in their + /// original context. + pub fn highlight(mut self, value: Document) -> Self { + self.stage.insert("highlight", value); + self + } + + /// Name of the Atlas Search index to use. 
 + pub fn index(mut self, value: impl Into) -> Self { + self.stage.insert("index", value.into()); + self + } + + /// Flag that specifies whether to perform a full document lookup on the backend database or + /// return only stored source fields directly from Atlas Search. + pub fn return_stored_source(mut self, value: bool) -> Self { + self.stage.insert("returnStoredSource", value); + self + } + + /// Reference point after which to retrieve results. + pub fn search_after(mut self, value: impl Into) -> Self { + self.stage.insert("searchAfter", value.into()); + self + } + + /// Reference point before which to retrieve results. + pub fn search_before(mut self, value: impl Into) -> Self { + self.stage.insert("searchBefore", value.into()); + self + } + + /// Flag that specifies whether to retrieve a detailed breakdown of the score for the documents + /// in the results. + pub fn score_details(mut self, value: bool) -> Self { + self.stage.insert("scoreDetails", value); + self + } + + /// Document that specifies the fields to sort the Atlas Search results by in ascending or + /// descending order. + pub fn sort(mut self, value: Document) -> Self { + self.stage.insert("sort", value); + self + } + + /// Convert to an aggregation stage document. + pub fn into_stage(self) -> Document { + doc! { "$search": self.stage } + } +} + +/// Finalize a search operator as a pending `$searchMeta` aggregation stage, allowing +/// options to be set. +/// ```no_run +/// # async fn wrapper() -> mongodb::error::Result<()> { +/// # use mongodb::{Collection, bson::{DateTime, Document, doc}}; +/// # let collection: Collection = todo!(); +/// # let start: DateTime = todo!(); +/// # let end: DateTime = todo!(); +/// use mongodb::atlas_search::{facet, range, search_meta}; +/// let cursor = collection.aggregate(vec![ +/// search_meta( +/// facet(doc! 
{ +/// "directorsFacet": facet::string("directors").num_buckets(7), +/// "yearFacet": facet::number("year", [2000, 2005, 2010, 2015]), +/// }) +/// .operator(range("released").gte(start).lte(end)) +/// ) +/// .index("movies") +/// .into_stage(), +/// doc! { +/// "$limit": 10, +/// }, +/// ]).await?; +/// # Ok(()) +/// # } +/// ``` +#[options_doc(atlas_search_meta, "into_stage")] +pub fn search_meta(op: SearchOperator) -> AtlasSearchMeta { + AtlasSearchMeta { + stage: doc! { op.name: op.spec }, + } +} + +/// A pending `$searchMeta` aggregation stage. Construct with [`search_meta`]. +pub struct AtlasSearchMeta { + stage: Document, +} + +#[export_doc(atlas_search_meta)] +impl AtlasSearchMeta { + /// Document that specifies the count options for retrieving a count of the results. + pub fn count(mut self, value: Document) -> Self { + self.stage.insert("count", value); + self + } + + /// Name of the Atlas Search index to use. + pub fn index(mut self, value: impl Into) -> Self { + self.stage.insert("index", value.into()); + self + } + + /// Convert to an aggregation stage document. + pub fn into_stage(self) -> Document { + doc! { "$searchMeta": self.stage } } } -impl IntoIterator for AtlasSearch { - type Item = AtlasSearch; +impl IntoIterator for SearchOperator { + type Item = SearchOperator; - type IntoIter = std::iter::Once>; + type IntoIter = std::iter::Once>; fn into_iter(self) -> Self::IntoIter { std::iter::once(self) @@ -142,144 +304,201 @@ impl MatchCriteria { } } -/// An Atlas Search operator parameter that can be either a string or array of strings. -pub trait StringOrArray { - #[allow(missing_docs)] - fn to_bson(self) -> Bson; -} +mod private { + use crate::bson::{doc, Bson}; -impl StringOrArray for &str { - fn to_bson(self) -> Bson { - Bson::String(self.to_owned()) + /// An Atlas Search operator parameter that can accept multiple types. 
+ pub trait Parameter { + fn to_bson(self) -> Bson; } -} -impl StringOrArray for String { - fn to_bson(self) -> Bson { - Bson::String(self) + impl> Parameter for T { + fn to_bson(self) -> Bson { + self.into() + } } -} -impl StringOrArray for &String { - fn to_bson(self) -> Bson { - Bson::String(self.clone()) + impl Parameter for super::SearchOperator { + fn to_bson(self) -> Bson { + Bson::Document(doc! { self.name: self.spec }) + } } } -impl StringOrArray for &[&str] { - fn to_bson(self) -> Bson { - Bson::Array(self.iter().map(|&s| Bson::String(s.to_owned())).collect()) - } -} +/// An Atlas Search operator parameter that can be either a string or array of strings. +pub trait StringOrArray: private::Parameter {} +impl StringOrArray for &str {} +impl StringOrArray for String {} +#[cfg(feature = "bson-3")] +impl StringOrArray for [&str; N] {} +impl StringOrArray for &[&str] {} +impl StringOrArray for &[String] {} -impl StringOrArray for &[&str; N] { - fn to_bson(self) -> Bson { - Bson::Array(self.iter().map(|&s| Bson::String(s.to_owned())).collect()) - } -} +/// An Atlas Search operator parameter that is itself a search operator. +pub trait SearchOperatorParam: private::Parameter {} +impl SearchOperatorParam for SearchOperator {} +impl SearchOperatorParam for Document {} -impl StringOrArray for &[String] { - fn to_bson(self) -> Bson { - Bson::Array(self.iter().map(|s| Bson::String(s.clone())).collect()) - } -} +/// Facet definitions. These can be used when constructing a facet definition doc: +/// ``` +/// # use mongodb::bson::doc; +/// use mongodb::atlas_search::facet; +/// let search = facet(doc! 
{ +/// "directorsFacet": facet::string("directors").num_buckets(7), +/// "yearFacet": facet::number("year", [2000, 2005, 2010, 2015]), +/// }); +/// ``` +pub mod facet { + use crate::bson::{doc, Bson, Document}; + use std::marker::PhantomData; -impl StringOrArray for &[String; N] { - fn to_bson(self) -> Bson { - Bson::Array(self.iter().map(|s| Bson::String(s.clone())).collect()) + /// A facet definition; see the [facet docs](https://www.mongodb.com/docs/atlas/atlas-search/facet/) for more details. + pub struct Facet { + inner: Document, + _t: PhantomData, } -} -impl StringOrArray for [String; N] { - fn to_bson(self) -> Bson { - Bson::Array(self.into_iter().map(Bson::String).collect()) + impl From> for Bson { + fn from(value: Facet) -> Self { + Bson::Document(value.inner) + } } -} -impl StringOrArray for &[&String] { - fn to_bson(self) -> Bson { - Bson::Array(self.iter().map(|&s| Bson::String(s.clone())).collect()) + /// A string facet. Construct with [`facet::string`](string). + pub struct String; + /// String facets allow you to narrow down Atlas Search results based on the most frequent + /// string values in the specified string field. + pub fn string(path: impl AsRef) -> Facet { + Facet { + inner: doc! { + "type": "string", + "path": path.as_ref(), + }, + _t: PhantomData, + } + } + impl Facet { + /// Maximum number of facet categories to return in the results. Value must be less than or + /// equal to 1000. + pub fn num_buckets(mut self, num: i32) -> Self { + self.inner.insert("numBuckets", num); + self + } } -} -impl StringOrArray for &[&String; N] { - fn to_bson(self) -> Bson { - Bson::Array(self.iter().map(|&s| Bson::String(s.clone())).collect()) + /// A number facet. Construct with [`facet::number`](number). + pub struct Number; + /// Numeric facets allow you to determine the frequency of numeric values in your search results + /// by breaking the results into separate ranges of numbers. 
+ pub fn number( + path: impl AsRef, + boundaries: impl IntoIterator>, + ) -> Facet { + Facet { + inner: doc! { + "type": "number", + "path": path.as_ref(), + "boundaries": boundaries.into_iter().map(Into::into).collect::>(), + }, + _t: PhantomData, + } + } + impl Facet { + /// Name of an additional bucket that counts documents returned from the operator that do + /// not fall within the specified boundaries. + pub fn default_bucket(mut self, bucket: impl AsRef) -> Self { + self.inner.insert("default", bucket.as_ref()); + self + } } -} -impl StringOrArray for Vec<&str> { - fn to_bson(self) -> Bson { - Bson::Array( - self.into_iter() - .map(|s| Bson::String(s.to_owned())) - .collect(), - ) + /// A date facet. Construct with [`facet::date`](date). + pub struct Date; + /// Date facets allow you to narrow down search results based on a date. + pub fn date( + path: impl AsRef, + boundaries: impl IntoIterator, + ) -> Facet { + Facet { + inner: doc! { + "type": "date", + "path": path.as_ref(), + "boundaries": boundaries.into_iter().collect::>(), + }, + _t: PhantomData, + } + } + impl Facet { + /// Name of an additional bucket that counts documents returned from the operator that do + /// not fall within the specified boundaries. + pub fn default_bucket(mut self, bucket: impl AsRef) -> Self { + self.inner.insert("default", bucket.as_ref()); + self + } } } -impl StringOrArray for Vec { - fn to_bson(self) -> Bson { - Bson::Array(self.into_iter().map(Bson::String).collect()) - } +/// Relation of the query shape geometry to the indexed field geometry. +#[derive(Debug, Clone, PartialEq)] +#[non_exhaustive] +pub enum Relation { + /// Indicates that the indexed geometry contains the query geometry. + Contains, + /// Indicates that both the query and indexed geometries have nothing in common. + Disjoint, + /// Indicates that both the query and indexed geometries intersect. + Intersects, + /// Indicates that the indexed geometry is within the query geometry. 
You can't use within with + /// LineString or Point. + Within, + /// Fallback for future compatibility. + Other(String), } -impl StringOrArray for Vec<&String> { - fn to_bson(self) -> Bson { - Bson::Array(self.into_iter().map(|s| Bson::String(s.clone())).collect()) +impl Relation { + fn name(&self) -> &str { + match self { + Self::Contains => "contains", + Self::Disjoint => "disjoint", + Self::Intersects => "intersects", + Self::Within => "within", + Self::Other(s) => s, + } } } -#[tokio::test] -async fn api_flow() { - // This is currently intended as a testbed for how the API works, not as an actual test. - return; - - #[allow(unreachable_code)] - { - #[allow(unused_variables)] - let coll: crate::Collection = todo!(); - let _ = coll - .aggregate(vec![ - AtlasSearch::autocomplete("title", "pre") - .fuzzy(doc! { "maxEdits": 1, "prefixLength": 1, "maxExpansions": 256 }) - .into(), - doc! { - "$limit": 10, - }, - doc! { - "$project": { - "_id": 0, - "title": 1, - } - }, - ]) - .await; - let _ = coll - .aggregate(vec![ - AtlasSearch::text("plot", "baseball").into(), - doc! { "$limit": 3 }, - doc! { - "$project": { - "_id": 0, - "title": 1, - "plot": 1, - } - }, - ]) - .await; - let _ = coll - .aggregate(vec![ - AtlasSearch::compound() - .must(AtlasSearch::text("description", "varieties")) - .should(AtlasSearch::text("description", "Fuji")) - .into(), - doc! { - "$project": { - "score": { "$meta": "searchScore" } - } - }, - ]) - .await; - } +/// An Atlas Search operator parameter that can be either a document or array of documents. +pub trait DocumentOrArray: private::Parameter {} +impl DocumentOrArray for Document {} +#[cfg(feature = "bson-3")] +impl DocumentOrArray for [Document; N] {} +impl DocumentOrArray for &[Document] {} + +macro_rules! 
numeric { + ($trait:ty) => { + impl $trait for i32 {} + impl $trait for i64 {} + impl $trait for u32 {} + impl $trait for f32 {} + impl $trait for f64 {} + }; } + +/// An Atlas Search operator parameter that can be a date, number, or GeoJSON point. +pub trait NearOrigin: private::Parameter {} +impl NearOrigin for DateTime {} +impl NearOrigin for Document {} +numeric! { NearOrigin } + +/// An Atlas Search operator parameter that can be any BSON numeric type. +pub trait BsonNumber: private::Parameter {} +numeric! { BsonNumber } + +/// An Atlas Search operator parameter that can be compared using [`range`]. +pub trait RangeValue: private::Parameter {} +numeric! { RangeValue } +impl RangeValue for DateTime {} +impl RangeValue for &str {} +impl RangeValue for &String {} +impl RangeValue for String {} +impl RangeValue for crate::bson::oid::ObjectId {} diff --git a/src/atlas_search/gen.rs b/src/atlas_search/gen.rs index 31111c43b..0c4eb4274 100644 --- a/src/atlas_search/gen.rs +++ b/src/atlas_search/gen.rs @@ -1,135 +1,543 @@ //! This file was autogenerated. Do not manually edit. use super::*; -#[allow(missing_docs)] +use mongodb_internal_macros::{export_doc, options_doc}; +///`autocomplete` Atlas Search operator. Construct with [`autocomplete`](autocomplete()). pub struct Autocomplete; -impl AtlasSearch { - /**The autocomplete operator performs a search for a word or phrase that - contains a sequence of characters from an incomplete input string. The - fields that you intend to query with the autocomplete operator must be - indexed with the autocomplete data type in the collection's index definition. - */ - /// - ///For more details, see the [autocomplete operator reference](https://www.mongodb.com/docs/atlas/atlas-search/autocomplete/). - pub fn autocomplete(path: impl StringOrArray, query: impl StringOrArray) -> Self { - AtlasSearch { - name: "autocomplete", - stage: doc! 
{ - "path" : path.to_bson(), "query" : query.to_bson(), - }, - _t: PhantomData, - } - } +/**The autocomplete operator performs a search for a word or phrase that +contains a sequence of characters from an incomplete input string. The +fields that you intend to query with the autocomplete operator must be +indexed with the autocomplete data type in the collection's index definition. +*/ +/// +///For more details, see the [autocomplete operator reference](https://www.mongodb.com/docs/atlas/atlas-search/autocomplete/). +#[options_doc(autocomplete, "into_stage")] +pub fn autocomplete( + path: impl StringOrArray, + query: impl StringOrArray, +) -> SearchOperator { + SearchOperator::new( + "autocomplete", + doc! { + "path" : path.to_bson(), "query" : query.to_bson(), + }, + ) +} +#[export_doc(autocomplete)] +impl SearchOperator { #[allow(missing_docs)] pub fn token_order(mut self, token_order: TokenOrder) -> Self { - self.stage.insert("tokenOrder", token_order.name()); + self.spec.insert("tokenOrder", token_order.name()); self } #[allow(missing_docs)] pub fn fuzzy(mut self, fuzzy: Document) -> Self { - self.stage.insert("fuzzy", fuzzy); + self.spec.insert("fuzzy", fuzzy); self } #[allow(missing_docs)] pub fn score(mut self, score: Document) -> Self { - self.stage.insert("score", score); + self.spec.insert("score", score); self } } -#[allow(missing_docs)] -pub struct Text; -impl AtlasSearch { - /**The text operator performs a full-text search using the analyzer that you specify in the index configuration. - If you omit an analyzer, the text operator uses the default standard analyzer. - */ - /// - ///For more details, see the [text operator reference](https://www.mongodb.com/docs/atlas/atlas-search/text/). - pub fn text(path: impl StringOrArray, query: impl StringOrArray) -> Self { - AtlasSearch { - name: "text", - stage: doc! { - "path" : path.to_bson(), "query" : query.to_bson(), - }, - _t: PhantomData, - } +///`compound` Atlas Search operator. 
Construct with [`compound`](compound()). +pub struct Compound; +/**The compound operator combines two or more operators into a single query. +Each element of a compound query is called a clause, and each clause +consists of one or more sub-queries. +*/ +/// +///For more details, see the [compound operator reference](https://www.mongodb.com/docs/atlas/atlas-search/compound/). +#[options_doc(compound, "into_stage")] +pub fn compound() -> SearchOperator { + SearchOperator::new("compound", doc! {}) +} +#[export_doc(compound)] +impl SearchOperator { + #[allow(missing_docs)] + pub fn must(mut self, must: impl IntoIterator) -> Self { + self.spec.insert( + "must", + must.into_iter().map(|o| o.to_bson()).collect::>(), + ); + self } #[allow(missing_docs)] - pub fn fuzzy(mut self, fuzzy: Document) -> Self { - self.stage.insert("fuzzy", fuzzy); + pub fn must_not( + mut self, + must_not: impl IntoIterator, + ) -> Self { + self.spec.insert( + "mustNot", + must_not + .into_iter() + .map(|o| o.to_bson()) + .collect::>(), + ); self } #[allow(missing_docs)] - pub fn match_criteria(mut self, match_criteria: MatchCriteria) -> Self { - self.stage.insert("matchCriteria", match_criteria.name()); + pub fn should(mut self, should: impl IntoIterator) -> Self { + self.spec.insert( + "should", + should.into_iter().map(|o| o.to_bson()).collect::>(), + ); + self + } + #[allow(missing_docs)] + pub fn filter(mut self, filter: impl IntoIterator) -> Self { + self.spec.insert( + "filter", + filter.into_iter().map(|o| o.to_bson()).collect::>(), + ); + self + } + #[allow(missing_docs)] + pub fn minimum_should_match(mut self, minimum_should_match: i32) -> Self { + self.spec.insert("minimumShouldMatch", minimum_should_match); + self + } + #[allow(missing_docs)] + pub fn score(mut self, score: Document) -> Self { + self.spec.insert("score", score); + self + } +} +///`embeddedDocument` Atlas Search operator. Construct with +/// [`embedded_document`](embedded_document()). 
+pub struct EmbeddedDocument; +/**The embeddedDocument operator is similar to $elemMatch operator. +It constrains multiple query predicates to be satisfied from a single +element of an array of embedded documents. embeddedDocument can be used only +for queries over fields of the embeddedDocuments +*/ +/// +///For more details, see the [embeddedDocument operator reference](https://www.mongodb.com/docs/atlas/atlas-search/embedded-document/). +#[options_doc(embedded_document, "into_stage")] +pub fn embedded_document( + path: impl StringOrArray, + operator: impl SearchOperatorParam, +) -> SearchOperator { + SearchOperator::new( + "embeddedDocument", + doc! { + "path" : path.to_bson(), "operator" : operator.to_bson(), + }, + ) +} +#[export_doc(embedded_document)] +impl SearchOperator { + #[allow(missing_docs)] + pub fn score(mut self, score: Document) -> Self { + self.spec.insert("score", score); + self + } +} +///`equals` Atlas Search operator. Construct with [`equals`](equals()). +pub struct Equals; +/**The equals operator checks whether a field matches a value you specify. + * */ +/// +///For more details, see the [equals operator reference](https://www.mongodb.com/docs/atlas/atlas-search/equals/). +#[options_doc(equals, "into_stage")] +pub fn equals(path: impl StringOrArray, value: impl Into) -> SearchOperator { + SearchOperator::new( + "equals", + doc! { + "path" : path.to_bson(), "value" : value.into(), + }, + ) +} +#[export_doc(equals)] +impl SearchOperator { + #[allow(missing_docs)] + pub fn score(mut self, score: Document) -> Self { + self.spec.insert("score", score); + self + } +} +///`exists` Atlas Search operator. Construct with [`exists`](exists()). +pub struct Exists; +/**The exists operator tests if a path to a specified indexed field name exists in a document. + * */ +/// +///For more details, see the [exists operator reference](https://www.mongodb.com/docs/atlas/atlas-search/exists/). 
+#[options_doc(exists, "into_stage")] +pub fn exists(path: impl StringOrArray) -> SearchOperator { + SearchOperator::new( + "exists", + doc! { + "path" : path.to_bson(), + }, + ) +} +#[export_doc(exists)] +impl SearchOperator { + #[allow(missing_docs)] + pub fn score(mut self, score: Document) -> Self { + self.spec.insert("score", score); + self + } +} +///`facet` Atlas Search operator. Construct with [`facet`](facet()). +pub struct Facet; +/**The facet collector groups results by values or ranges in the specified +faceted fields and returns the count for each of those groups. +*/ +/// +///For more details, see the [facet operator reference](https://www.mongodb.com/docs/atlas/atlas-search/facet/). +#[options_doc(facet, "into_stage")] +pub fn facet(facets: Document) -> SearchOperator { + SearchOperator::new( + "facet", + doc! { + "facets" : facets, + }, + ) +} +#[export_doc(facet)] +impl SearchOperator { + #[allow(missing_docs)] + pub fn operator(mut self, operator: impl SearchOperatorParam) -> Self { + self.spec.insert("operator", operator.to_bson()); + self + } +} +///`geoShape` Atlas Search operator. Construct with [`geo_shape`](geo_shape()). +pub struct GeoShape; +/**The geoShape operator supports querying shapes with a relation to a given +geometry if indexShapes is set to true in the index definition. +*/ +/// +///For more details, see the [geoShape operator reference](https://www.mongodb.com/docs/atlas/atlas-search/geoShape/). +#[options_doc(geo_shape, "into_stage")] +pub fn geo_shape( + path: impl StringOrArray, + relation: Relation, + geometry: Document, +) -> SearchOperator { + SearchOperator::new( + "geoShape", + doc! { + "path" : path.to_bson(), "relation" : relation.name(), "geometry" : geometry, + }, + ) +} +#[export_doc(geo_shape)] +impl SearchOperator { + #[allow(missing_docs)] + pub fn score(mut self, score: Document) -> Self { + self.spec.insert("score", score); + self + } +} +///`geoWithin` Atlas Search operator. 
Construct with [`geo_within`](geo_within()). +pub struct GeoWithin; +/**The geoWithin operator supports querying geographic points within a given +geometry. Only points are returned, even if indexShapes value is true in +the index definition. +*/ +/// +///For more details, see the [geoWithin operator reference](https://www.mongodb.com/docs/atlas/atlas-search/geoWithin/). +#[options_doc(geo_within, "into_stage")] +pub fn geo_within(path: impl StringOrArray) -> SearchOperator { + SearchOperator::new( + "geoWithin", + doc! { + "path" : path.to_bson(), + }, + ) +} +#[export_doc(geo_within)] +impl SearchOperator { + #[allow(missing_docs)] + pub fn geo_box(mut self, geo_box: Document) -> Self { + self.spec.insert("box", geo_box); + self + } + #[allow(missing_docs)] + pub fn circle(mut self, circle: Document) -> Self { + self.spec.insert("circle", circle); + self + } + #[allow(missing_docs)] + pub fn geometry(mut self, geometry: Document) -> Self { + self.spec.insert("geometry", geometry); + self + } + #[allow(missing_docs)] + pub fn score(mut self, score: Document) -> Self { + self.spec.insert("score", score); + self + } +} +///`in` Atlas Search operator. Construct with [`search_in`](search_in()). +pub struct SearchIn; +/**The in operator performs a search for an array of BSON values in a field. + * */ +/// +///For more details, see the [in operator reference](https://www.mongodb.com/docs/atlas/atlas-search/in/). +#[options_doc(search_in, "into_stage")] +pub fn search_in(path: impl StringOrArray, value: impl Into) -> SearchOperator { + SearchOperator::new( + "in", + doc! { + "path" : path.to_bson(), "value" : value.into(), + }, + ) +} +#[export_doc(search_in)] +impl SearchOperator { + #[allow(missing_docs)] + pub fn score(mut self, score: Document) -> Self { + self.spec.insert("score", score); + self + } +} +///`moreLikeThis` Atlas Search operator. Construct with [`more_like_this`](more_like_this()). 
+pub struct MoreLikeThis; +/**The moreLikeThis operator returns documents similar to input documents. +The moreLikeThis operator allows you to build features for your applications +that display similar or alternative results based on one or more given documents. +*/ +/// +///For more details, see the [moreLikeThis operator reference](https://www.mongodb.com/docs/atlas/atlas-search/moreLikeThis/). +#[options_doc(more_like_this, "into_stage")] +pub fn more_like_this(like: impl DocumentOrArray) -> SearchOperator { + SearchOperator::new( + "moreLikeThis", + doc! { + "like" : like.to_bson(), + }, + ) +} +#[export_doc(more_like_this)] +impl SearchOperator { + #[allow(missing_docs)] + pub fn score(mut self, score: Document) -> Self { + self.spec.insert("score", score); + self + } +} +///`near` Atlas Search operator. Construct with [`near`](near()). +pub struct Near; +/**The near operator supports querying and scoring numeric, date, and GeoJSON point values. + * */ +/// +///For more details, see the [near operator reference](https://www.mongodb.com/docs/atlas/atlas-search/near/). +#[options_doc(near, "into_stage")] +pub fn near( + path: impl StringOrArray, + origin: impl NearOrigin, + pivot: impl BsonNumber, +) -> SearchOperator { + SearchOperator::new( + "near", + doc! { + "path" : path.to_bson(), "origin" : origin.to_bson(), "pivot" : pivot + .to_bson(), + }, + ) +} +#[export_doc(near)] +impl SearchOperator { + #[allow(missing_docs)] + pub fn score(mut self, score: Document) -> Self { + self.spec.insert("score", score); + self + } +} +///`phrase` Atlas Search operator. Construct with [`phrase`](phrase()). +pub struct Phrase; +/**The phrase operator performs search for documents containing an ordered sequence of terms + * using the analyzer specified in the index configuration. + * */ +/// +///For more details, see the [phrase operator reference](https://www.mongodb.com/docs/atlas/atlas-search/phrase/). 
+#[options_doc(phrase, "into_stage")] +pub fn phrase(path: impl StringOrArray, query: impl StringOrArray) -> SearchOperator { + SearchOperator::new( + "phrase", + doc! { + "path" : path.to_bson(), "query" : query.to_bson(), + }, + ) +} +#[export_doc(phrase)] +impl SearchOperator { + #[allow(missing_docs)] + pub fn slop(mut self, slop: i32) -> Self { + self.spec.insert("slop", slop); self } #[allow(missing_docs)] pub fn synonyms(mut self, synonyms: impl AsRef) -> Self { - self.stage.insert("synonyms", synonyms.as_ref()); + self.spec.insert("synonyms", synonyms.as_ref()); self } #[allow(missing_docs)] pub fn score(mut self, score: Document) -> Self { - self.stage.insert("score", score); + self.spec.insert("score", score); self } } -#[allow(missing_docs)] -pub struct Compound; -impl AtlasSearch { - /**The compound operator combines two or more operators into a single query. - Each element of a compound query is called a clause, and each clause - consists of one or more sub-queries. - */ - /// - ///For more details, see the [compound operator reference](https://www.mongodb.com/docs/atlas/atlas-search/compound/). - pub fn compound() -> Self { - AtlasSearch { - name: "compound", - stage: doc! {}, - _t: PhantomData, - } - } - #[allow(missing_docs)] - pub fn must(mut self, must: impl IntoIterator>) -> Self { - self.stage.insert( - "must", - must.into_iter().map(Document::from).collect::>(), - ); +///`queryString` Atlas Search operator. Construct with [`query_string`](query_string()). +pub struct QueryString; +/// +/// +///For more details, see the [queryString operator reference](https://www.mongodb.com/docs/atlas/atlas-search/queryString/). +#[options_doc(query_string, "into_stage")] +pub fn query_string( + default_path: impl StringOrArray, + query: impl AsRef, +) -> SearchOperator { + SearchOperator::new( + "queryString", + doc! 
{ + "defaultPath" : default_path.to_bson(), "query" : query.as_ref(), + }, + ) +} +#[export_doc(query_string)] +impl SearchOperator {} +///`range` Atlas Search operator. Construct with [`range`](range()). +pub struct Range; +/**The range operator supports querying and scoring numeric, date, and string values. +You can use this operator to find results that are within a given numeric, date, objectId, or letter (from the English alphabet) range. +*/ +/// +///For more details, see the [range operator reference](https://www.mongodb.com/docs/atlas/atlas-search/range/). +#[options_doc(range, "into_stage")] +pub fn range(path: impl StringOrArray) -> SearchOperator { + SearchOperator::new( + "range", + doc! { + "path" : path.to_bson(), + }, + ) +} +#[export_doc(range)] +impl SearchOperator { + #[allow(missing_docs)] + pub fn gt(mut self, gt: impl RangeValue) -> Self { + self.spec.insert("gt", gt.to_bson()); self } #[allow(missing_docs)] - pub fn must_not(mut self, must_not: impl IntoIterator>) -> Self { - self.stage.insert( - "mustNot", - must_not.into_iter().map(Document::from).collect::>(), - ); + pub fn gte(mut self, gte: impl RangeValue) -> Self { + self.spec.insert("gte", gte.to_bson()); self } #[allow(missing_docs)] - pub fn should(mut self, should: impl IntoIterator>) -> Self { - self.stage.insert( - "should", - should.into_iter().map(Document::from).collect::>(), - ); + pub fn lt(mut self, lt: impl RangeValue) -> Self { + self.spec.insert("lt", lt.to_bson()); self } #[allow(missing_docs)] - pub fn filter(mut self, filter: impl IntoIterator>) -> Self { - self.stage.insert( - "filter", - filter.into_iter().map(Document::from).collect::>(), - ); + pub fn lte(mut self, lte: impl RangeValue) -> Self { + self.spec.insert("lte", lte.to_bson()); self } #[allow(missing_docs)] - pub fn minimum_should_match(mut self, minimum_should_match: i32) -> Self { - self.stage - .insert("minimumShouldMatch", minimum_should_match); + pub fn score(mut self, score: Document) -> Self { + 
self.spec.insert("score", score); + self + } +} +///`regex` Atlas Search operator. Construct with [`regex`](regex()). +pub struct Regex; +/**regex interprets the query field as a regular expression. +regex is a term-level operator, meaning that the query field isn't analyzed. +*/ +/// +///For more details, see the [regex operator reference](https://www.mongodb.com/docs/atlas/atlas-search/regex/). +#[options_doc(regex, "into_stage")] +pub fn regex(path: impl StringOrArray, query: impl AsRef) -> SearchOperator { + SearchOperator::new( + "regex", + doc! { + "path" : path.to_bson(), "query" : query.as_ref(), + }, + ) +} +#[export_doc(regex)] +impl SearchOperator { + #[allow(missing_docs)] + pub fn allow_analyzed_field(mut self, allow_analyzed_field: bool) -> Self { + self.spec.insert("allowAnalyzedField", allow_analyzed_field); + self + } + #[allow(missing_docs)] + pub fn score(mut self, score: Document) -> Self { + self.spec.insert("score", score); + self + } +} +///`text` Atlas Search operator. Construct with [`text`](text()). +pub struct Text; +/**The text operator performs a full-text search using the analyzer that you specify in the index configuration. +If you omit an analyzer, the text operator uses the default standard analyzer. +*/ +/// +///For more details, see the [text operator reference](https://www.mongodb.com/docs/atlas/atlas-search/text/). +#[options_doc(text, "into_stage")] +pub fn text(path: impl StringOrArray, query: impl StringOrArray) -> SearchOperator { + SearchOperator::new( + "text", + doc! 
{ + "path" : path.to_bson(), "query" : query.to_bson(), + }, + ) +} +#[export_doc(text)] +impl SearchOperator { + #[allow(missing_docs)] + pub fn fuzzy(mut self, fuzzy: Document) -> Self { + self.spec.insert("fuzzy", fuzzy); + self + } + #[allow(missing_docs)] + pub fn match_criteria(mut self, match_criteria: MatchCriteria) -> Self { + self.spec.insert("matchCriteria", match_criteria.name()); + self + } + #[allow(missing_docs)] + pub fn synonyms(mut self, synonyms: impl AsRef) -> Self { + self.spec.insert("synonyms", synonyms.as_ref()); + self + } + #[allow(missing_docs)] + pub fn score(mut self, score: Document) -> Self { + self.spec.insert("score", score); + self + } +} +///`wildcard` Atlas Search operator. Construct with [`wildcard`](wildcard()). +pub struct Wildcard; +/**The wildcard operator enables queries which use special characters in the search string that + * can match any character. + * */ +/// +///For more details, see the [wildcard operator reference](https://www.mongodb.com/docs/atlas/atlas-search/wildcard/). +#[options_doc(wildcard, "into_stage")] +pub fn wildcard(path: impl StringOrArray, query: impl AsRef) -> SearchOperator { + SearchOperator::new( + "wildcard", + doc! 
{ + "path" : path.to_bson(), "query" : query.as_ref(), + }, + ) +} +#[export_doc(wildcard)] +impl SearchOperator { + #[allow(missing_docs)] + pub fn allow_analyzed_field(mut self, allow_analyzed_field: bool) -> Self { + self.spec.insert("allowAnalyzedField", allow_analyzed_field); self } #[allow(missing_docs)] pub fn score(mut self, score: Document) -> Self { - self.stage.insert("score", score); + self.spec.insert("score", score); self } } diff --git a/src/lib.rs b/src/lib.rs index 77c5f65cc..d41b2da1c 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -21,7 +21,7 @@ pub mod options; pub use ::mongocrypt; pub mod action; -//pub mod atlas_search; +pub mod atlas_search; pub(crate) mod bson_compat; mod bson_util; pub mod change_stream; diff --git a/src/test.rs b/src/test.rs index 751247855..f9de13f0e 100644 --- a/src/test.rs +++ b/src/test.rs @@ -4,6 +4,7 @@ #[cfg(feature = "dns-resolver")] #[path = "test/atlas_connectivity.rs"] mod atlas_connectivity_skip_ci; // requires Atlas URI environment variables set +mod atlas_search; mod auth; mod bulk_write; mod change_stream; diff --git a/src/test/atlas_search.rs b/src/test/atlas_search.rs new file mode 100644 index 000000000..d3a455f86 --- /dev/null +++ b/src/test/atlas_search.rs @@ -0,0 +1,355 @@ +use crate::{ + atlas_search::*, + bson::{doc, DateTime}, +}; + +#[test] +fn helper_output_doc() { + assert_eq!( + doc! { + "$search": { + "autocomplete": { + "query": "pre", + "path": "title", + "fuzzy": { + "maxEdits": 1, + "prefixLength": 1, + "maxExpansions": 256, + }, + } + } + }, + autocomplete("title", "pre") + .fuzzy(doc! { "maxEdits": 1, "prefixLength": 1, "maxExpansions": 256 }) + .into_stage() + ); + assert_eq!( + doc! { + "$search": { + "text": { + "path": "plot", + "query": "baseball", + } + } + }, + text("plot", "baseball").into_stage() + ); + assert_eq!( + doc! 
{ + "$search": { + "compound": { + "must": [{ + "text": { + "path": "description", + "query": "varieties", + } + }], + "should": [{ + "text": { + "path": "description", + "query": "Fuji", + } + }], + } + } + }, + compound() + .must(text("description", "varieties")) + .should(text("description", "Fuji")) + .into_stage() + ); + assert_eq!( + doc! { + "$search": { + "embeddedDocument": { + "path": "items", + "operator": { + "compound": { + "must": [{ + "text": { + "path": "items.tags", + "query": "school", + } + }], + "should": [{ + "text": { + "path": "items.name", + "query": "backpack", + } + }] + } + }, + "score": { + "embedded": { + "aggregate": "mean" + } + }, + } + } + }, + embedded_document( + "items", + compound() + .must(text("items.tags", "school")) + .should(text("items.name", "backpack")), + ) + .score(doc! { + "embedded": { + "aggregate": "mean" + } + }) + .into_stage() + ); + assert_eq!( + doc! { + "$search": { + "equals": { + "path": "verified_user", + "value": true, + } + } + }, + equals("verified_user", true).into_stage() + ); + let gte_dt = DateTime::parse_rfc3339_str("2000-01-01T00:00:00.000Z").unwrap(); + let lte_dt = DateTime::parse_rfc3339_str("2015-01-31T00:00:00.000Z").unwrap(); + assert_eq!( + doc! { + "$searchMeta": { + "facet": { + "operator": { + "range": { + "path": "released", + "gte": gte_dt, + "lte": lte_dt, + } + }, + "facets": { + "directorsFacet": { + "type": "string", + "path": "directors", + "numBuckets": 7, + }, + "yearFacet": { + "type": "number", + "path": "year", + "boundaries": [2000, 2005, 2010, 2015] + }, + } + } + } + }, + facet(doc! { + "directorsFacet": facet::string("directors").num_buckets(7), + "yearFacet": facet::number("year", [2000, 2005, 2010, 2015]), + }) + .operator(range("released").gte(gte_dt).lte(lte_dt)) + .into_stage_meta() + ); + assert_eq!( + doc! 
{ + "$search": { + "geoShape": { + "relation": "disjoint", + "geometry": { + "type": "Polygon", + "coordinates": [[[-161.323242,22.512557], + [-152.446289,22.065278], + [-156.09375,17.811456], + [-161.323242,22.512557]]] + }, + "path": "address.location" + } + } + }, + geo_shape( + "address.location", + Relation::Disjoint, + doc! { + "type": "Polygon", + "coordinates": [[[-161.323242,22.512557], + [-152.446289,22.065278], + [-156.09375,17.811456], + [-161.323242,22.512557]]] + } + ) + .into_stage() + ); + assert_eq!( + doc! { + "$search": { + "geoWithin": { + "path": "address.location", + "box": { + "bottomLeft": { + "type": "Point", + "coordinates": [112.467, -55.050] + }, + "topRight": { + "type": "Point", + "coordinates": [168.000, -9.133] + } + } + } + } + }, + geo_within("address.location") + .geo_box(doc! { + "bottomLeft": { + "type": "Point", + "coordinates": [112.467, -55.050] + }, + "topRight": { + "type": "Point", + "coordinates": [168.000, -9.133] + } + }) + .into_stage() + ); + assert_eq!( + doc! { + "$search": { + "in": { + "path": "accounts", + "value": [371138, 371139, 371140] + } + } + }, + search_in("accounts", [371138, 371139, 371140].as_ref()).into_stage() + ); + assert_eq!( + doc! { + "$search": { + "moreLikeThis": { + "like": { + "title": "The Godfather", + "genres": "action" + } + } + } + }, + more_like_this(doc! { + "title": "The Godfather", + "genres": "action" + }) + .into_stage() + ); + assert_eq!( + doc! { + "$search": { + "index": "runtimes", + "near": { + "path": "year", + "origin": 2000, + "pivot": 2 + } + } + }, + search(near("year", 2000, 2)).index("runtimes").into_stage() + ); + let dt = DateTime::parse_rfc3339_str("1915-09-13T00:00:00.000+00:00").unwrap(); + assert_eq!( + doc! { + "$search": { + "index": "releaseddate", + "near": { + "path": "released", + "origin": dt, + "pivot": 7776000000i64 + } + } + }, + search(near("released", dt, 7776000000i64)) + .index("releaseddate") + .into_stage() + ); + assert_eq!( + doc! 
{ + "$search": { + "near": { + "origin": { + "type": "Point", + "coordinates": [-8.61308, 41.1413] + }, + "pivot": 1000, + "path": "address.location" + } + } + }, + near( + "address.location", + doc! { + "type": "Point", + "coordinates": [-8.61308, 41.1413] + }, + 1000, + ) + .into_stage() + ); + assert_eq!( + doc! { + "$search": { + "phrase": { + "path": "title", + "query": "new york" + } + } + }, + phrase("title", "new york").into_stage() + ); + #[cfg(feature = "bson-3")] + assert_eq!( + doc! { + "$search": { + "phrase": { + "path": "title", + "query": ["the man", "the moon"] + } + } + }, + phrase("title", ["the man", "the moon"]).into_stage() + ); + assert_eq!( + doc! { + "$search": { + "queryString": { + "defaultPath": "title", + "query": "Rocky AND (IV OR 4 OR Four)" + } + } + }, + query_string("title", "Rocky AND (IV OR 4 OR Four)").into_stage() + ); + assert_eq!( + doc! { + "$search": { + "regex": { + "path": "title", + "query": "(.*) Seattle" + } + } + }, + regex("title", "(.*) Seattle").into_stage() + ); + assert_eq!( + doc! { + "$search": { + "wildcard": { + "query": "Green D*", + "path": "title" + } + } + }, + wildcard("title", "Green D*").into_stage() + ); +} + +#[test] +fn string_or_array_forms() { + exists("hello"); + exists("hello".to_owned()); + #[cfg(feature = "bson-3")] + exists(["hello", "world"]); + exists(&["hello", "world"] as &[&str]); + exists(&["hello".to_owned()] as &[String]); +}