From f6da9d9920c787a6a019fc4c5ec57d8bd09bebd4 Mon Sep 17 00:00:00 2001 From: Tommy Smith Date: Fri, 29 Aug 2025 11:47:28 +0100 Subject: [PATCH] Remove all BC checks for features new in <1.27.0 --- .github/workflows/main.yaml | 4 - src/collections/config/classes.ts | 8 +- src/collections/config/index.ts | 13 +- src/collections/config/utils.ts | 13 +- src/collections/deserialize/index.ts | 17 +- src/collections/generate/index.ts | 110 +-- src/collections/index.ts | 94 +- src/collections/query/check.ts | 152 +--- src/collections/query/index.ts | 110 +-- src/collections/serialize/index.ts | 173 +--- src/collections/serialize/unit.test.ts | 109 +-- src/collections/tenants/index.ts | 33 +- src/collections/tenants/unit.test.ts | 4 +- src/openapi/schema.ts | 150 +++- src/proto/v1/base.ts | 13 + src/proto/v1/base_search.ts | 121 +-- src/proto/v1/file_replication.ts | 1140 ++++++++++++++++++++++++ src/proto/v1/generative.ts | 18 +- src/proto/v1/properties.ts | 37 +- src/proto/v1/search_get.ts | 172 +--- src/utils/dbVersion.ts | 118 +-- 21 files changed, 1472 insertions(+), 1137 deletions(-) create mode 100644 src/proto/v1/file_replication.ts diff --git a/.github/workflows/main.yaml b/.github/workflows/main.yaml index 78db4a34..5ff3e3eb 100644 --- a/.github/workflows/main.yaml +++ b/.github/workflows/main.yaml @@ -7,8 +7,6 @@ on: pull_request: env: - WEAVIATE_125: 1.25.34 - WEAVIATE_126: 1.26.17 WEAVIATE_127: 1.27.27 WEAVIATE_128: 1.28.16 WEAVIATE_129: 1.29.9 @@ -42,8 +40,6 @@ jobs: fail-fast: false matrix: versions: [ - { node: "22.x", weaviate: $WEAVIATE_125}, - { node: "22.x", weaviate: $WEAVIATE_126}, { node: "22.x", weaviate: $WEAVIATE_127}, { node: "22.x", weaviate: $WEAVIATE_128}, { node: "22.x", weaviate: $WEAVIATE_129}, diff --git a/src/collections/config/classes.ts b/src/collections/config/classes.ts index c27f243f..2a2e2350 100644 --- a/src/collections/config/classes.ts +++ b/src/collections/config/classes.ts @@ -30,11 +30,7 @@ import { } from './types/index.js'; 
export class MergeWithExisting { - static schema( - current: WeaviateClass, - supportsNamedVectors: boolean, - update?: CollectionConfigUpdate - ): WeaviateClass { + static schema(current: WeaviateClass, update?: CollectionConfigUpdate): WeaviateClass { if (update === undefined) return current; if (update.description !== undefined) current.description = update.description; if (update.propertyDescriptions !== undefined) @@ -61,7 +57,7 @@ export class MergeWithExisting { if (update.vectorizers !== undefined) { if (Array.isArray(update.vectorizers)) { current.vectorConfig = MergeWithExisting.vectors(current.vectorConfig, update.vectorizers); - } else if (supportsNamedVectors && current.vectorConfig !== undefined) { + } else if (current.vectorConfig !== undefined) { const updateVectorizers = { ...update.vectorizers, name: 'default', diff --git a/src/collections/config/index.ts b/src/collections/config/index.ts index bdc7ed4e..c06013c5 100644 --- a/src/collections/config/index.ts +++ b/src/collections/config/index.ts @@ -50,9 +50,8 @@ const config = ( .withProperty(resolveReference(reference)) .do() .then(() => {}), - addVector: async (vectors: VectorizersConfigAdd) => { - const supportsDynamicVectorIndex = await dbVersionSupport.supportsDynamicVectorIndex(); - const { vectorsConfig } = makeVectorsConfig(vectors, supportsDynamicVectorIndex); + addVector: (vectors: VectorizersConfigAdd) => { + const { vectorsConfig } = makeVectorsConfig(vectors); return new VectorAdder(connection).withClassName(name).withVectors(vectorsConfig).do(); }, get: () => getRaw().then(classToCollection), @@ -90,13 +89,7 @@ const config = ( }, update: (config?: CollectionConfigUpdate) => { return getRaw() - .then(async (current) => - MergeWithExisting.schema( - current, - await dbVersionSupport.supportsNamedVectors().then((s) => s.supports), - config - ) - ) + .then((current) => MergeWithExisting.schema(current, config)) .then((merged) => new ClassUpdater(connection).withClass(merged).do()) 
.then(() => {}); }, diff --git a/src/collections/config/utils.ts b/src/collections/config/utils.ts index 17b3dec5..00b9eaec 100644 --- a/src/collections/config/utils.ts +++ b/src/collections/config/utils.ts @@ -1,8 +1,4 @@ -import { - WeaviateDeserializationError, - WeaviateInvalidInputError, - WeaviateUnsupportedFeatureError, -} from '../../errors.js'; +import { WeaviateDeserializationError, WeaviateInvalidInputError } from '../../errors.js'; import { WeaviateBM25Config, WeaviateClass, @@ -17,7 +13,6 @@ import { WeaviateVectorIndexConfig, WeaviateVectorsConfig, } from '../../openapi/types.js'; -import { DbVersionSupport } from '../../utils/dbVersion.js'; import { MultiVectorEncodingGuards, QuantizerGuards, VectorIndexGuards } from '../configure/parsing.js'; import { PropertyConfigCreate, @@ -237,8 +232,7 @@ export const parseVectorizerConfig = (config?: VectorizerConfig): any => { }; export const makeVectorsConfig = ( - configVectorizers: VectorizersConfigCreate | VectorizersConfigAdd, - supportsDynamicVectorIndex: Awaited> + configVectorizers: VectorizersConfigCreate | VectorizersConfigAdd ) => { let vectorizers: string[] = []; const vectorsConfig: Record = {}; @@ -251,9 +245,6 @@ export const makeVectorsConfig = ( }, ]; vectorizersConfig.forEach((v) => { - if (v.vectorIndex.name === 'dynamic' && !supportsDynamicVectorIndex.supports) { - throw new WeaviateUnsupportedFeatureError(supportsDynamicVectorIndex.message); - } const vectorConfig: any = { vectorIndexConfig: parseVectorIndex(v.vectorIndex), vectorIndexType: v.vectorIndex.name, diff --git a/src/collections/deserialize/index.ts b/src/collections/deserialize/index.ts index 8982d723..9c57ae18 100644 --- a/src/collections/deserialize/index.ts +++ b/src/collections/deserialize/index.ts @@ -57,15 +57,8 @@ const UINT16LEN = 2; const UINT32LEN = 4; export class Deserialize { - private supports125ListValue: boolean; - - private constructor(supports125ListValue: boolean) { - this.supports125ListValue = 
supports125ListValue; - } - - public static async use(support: DbVersionSupport): Promise { - const supports125ListValue = await support.supports125ListValue().then((res) => res.supports); - return new Deserialize(supports125ListValue); + public static use(support: DbVersionSupport): Promise { + return Promise.resolve(new Deserialize()); } private static aggregateBoolean( @@ -374,13 +367,9 @@ export class Deserialize { if (value.boolValue !== undefined) return value.boolValue; if (value.dateValue !== undefined) return new Date(value.dateValue); if (value.intValue !== undefined) return value.intValue; - if (value.listValue !== undefined) - return this.supports125ListValue - ? this.parseListValue(value.listValue) - : value.listValue.values.map((v) => this.parsePropertyValue(v)); + if (value.listValue !== undefined) return this.parseListValue(value.listValue); if (value.numberValue !== undefined) return value.numberValue; if (value.objectValue !== undefined) return this.objectProperties(value.objectValue); - if (value.stringValue !== undefined) return value.stringValue; if (value.textValue !== undefined) return value.textValue; if (value.uuidValue !== undefined) return value.uuidValue; if (value.blobValue !== undefined) return value.blobValue; diff --git a/src/collections/generate/index.ts b/src/collections/generate/index.ts index db532b71..0e1c4e35 100644 --- a/src/collections/generate/index.ts +++ b/src/collections/generate/index.ts @@ -77,7 +77,7 @@ class GenerateManager implements Generate { opts?: FetchObjectsOptions ): Promise> { return Promise.all([ - this.check.fetchObjects(opts), + this.check.fetchObjects(), this.check.supportForSingleGroupedGenerative(), this.check.supportForGenerativeConfigRuntime(generate.config), ]) @@ -116,7 +116,7 @@ class GenerateManager implements Generate { C extends GenerativeConfigRuntime | undefined = undefined >(query: string, generate: GenerateOptions, opts?: Bm25Options): GenerateReturn { return Promise.all([ - 
this.check.bm25(opts), + this.check.bm25(), this.check.supportForSingleGroupedGenerative(), this.check.supportForGenerativeConfigRuntime(generate.config), ]) @@ -159,27 +159,19 @@ class GenerateManager implements Generate { this.check.supportForSingleGroupedGenerative(), this.check.supportForGenerativeConfigRuntime(generate.config), ]) - .then( - async ([ - { search, supportsTargets, supportsVectorsForTargets, supportsWeightsForTargets, supportsVectors }, - supportsSingleGrouped, - ]) => ({ - search, - args: { - ...(await Serialize.search.hybrid( - { - query, - supportsTargets, - supportsVectorsForTargets, - supportsWeightsForTargets, - supportsVectors, - }, - opts - )), - generative: await Serialize.generative({ supportsSingleGrouped }, generate), - }, - }) - ) + .then(async ([{ search, supportsVectors }, supportsSingleGrouped]) => ({ + search, + args: { + ...(await Serialize.search.hybrid( + { + query, + supportsVectors, + }, + opts + )), + generative: await Serialize.generative({ supportsSingleGrouped }, generate), + }, + })) .then(({ search, args }) => search.withHybrid(args)) .then((reply) => this.parseGroupByReply(opts, reply)); } @@ -212,18 +204,16 @@ class GenerateManager implements Generate { opts?: NearOptions ): GenerateReturn { return Promise.all([ - this.check.nearSearch(opts), + this.check.nearSearch(), this.check.supportForSingleGroupedGenerative(), this.check.supportForGenerativeConfigRuntime(generate.config), ]) - .then(async ([{ search, supportsTargets, supportsWeightsForTargets }, supportsSingleGrouped]) => ({ + .then(async ([{ search }, supportsSingleGrouped]) => ({ search, args: { ...Serialize.search.nearImage( { image: await toBase64FromMedia(image), - supportsTargets, - supportsWeightsForTargets, }, opts ), @@ -258,18 +248,16 @@ class GenerateManager implements Generate { C extends GenerativeConfigRuntime | undefined = undefined >(id: string, generate: GenerateOptions, opts?: NearOptions): GenerateReturn { return Promise.all([ - 
this.check.nearSearch(opts), + this.check.nearSearch(), this.check.supportForSingleGroupedGenerative(), this.check.supportForGenerativeConfigRuntime(generate.config), ]) - .then(async ([{ search, supportsTargets, supportsWeightsForTargets }, supportsSingleGrouped]) => ({ + .then(async ([{ search }, supportsSingleGrouped]) => ({ search, args: { ...Serialize.search.nearObject( { id, - supportsTargets, - supportsWeightsForTargets, }, opts ), @@ -308,18 +296,16 @@ class GenerateManager implements Generate { opts?: NearOptions ): GenerateReturn { return Promise.all([ - this.check.nearSearch(opts), + this.check.nearSearch(), this.check.supportForSingleGroupedGenerative(), this.check.supportForGenerativeConfigRuntime(generate.config), ]) - .then(async ([{ search, supportsTargets, supportsWeightsForTargets }, supportsSingleGrouped]) => ({ + .then(async ([{ search }, supportsSingleGrouped]) => ({ search, args: { ...Serialize.search.nearText( { query, - supportsTargets, - supportsWeightsForTargets, }, opts ), @@ -362,27 +348,19 @@ class GenerateManager implements Generate { this.check.supportForSingleGroupedGenerative(), this.check.supportForGenerativeConfigRuntime(generate.config), ]) - .then( - async ([ - { search, supportsTargets, supportsVectorsForTargets, supportsWeightsForTargets, supportsVectors }, - supportsSingleGrouped, - ]) => ({ - search, - args: { - ...(await Serialize.search.nearVector( - { - vector, - supportsTargets, - supportsVectorsForTargets, - supportsWeightsForTargets, - supportsVectors, - }, - opts - )), - generative: await Serialize.generative({ supportsSingleGrouped }, generate), - }, - }) - ) + .then(async ([{ search, supportsVectors }, supportsSingleGrouped]) => ({ + search, + args: { + ...(await Serialize.search.nearVector( + { + vector, + supportsVectors, + }, + opts + )), + generative: await Serialize.generative({ supportsSingleGrouped }, generate), + }, + })) .then(({ search, args }) => search.withNearVector(args)) .then((reply) => 
this.parseGroupByReply(opts, reply)); } @@ -418,56 +396,52 @@ class GenerateManager implements Generate { opts?: NearOptions ): GenerateReturn { return Promise.all([ - this.check.nearSearch(opts), + this.check.nearSearch(), this.check.supportForSingleGroupedGenerative(), this.check.supportForGenerativeConfigRuntime(generate.config), ]) - .then(([{ search, supportsTargets, supportsWeightsForTargets }, supportsSingleGrouped]) => { - const args = { - supportsTargets, - supportsWeightsForTargets, - }; + .then(([{ search }, supportsSingleGrouped]) => { let send: (media: string, generative: GenerativeSearch) => Promise; switch (type) { case 'audio': send = (media, generative) => search.withNearAudio({ - ...Serialize.search.nearAudio({ audio: media, ...args }, opts), + ...Serialize.search.nearAudio({ audio: media }, opts), generative, }); break; case 'depth': send = (media, generative) => search.withNearDepth({ - ...Serialize.search.nearDepth({ depth: media, ...args }, opts), + ...Serialize.search.nearDepth({ depth: media }, opts), generative, }); break; case 'image': send = (media, generative) => search.withNearImage({ - ...Serialize.search.nearImage({ image: media, ...args }, opts), + ...Serialize.search.nearImage({ image: media }, opts), generative, }); break; case 'imu': send = (media, generative) => search.withNearIMU({ - ...Serialize.search.nearIMU({ imu: media, ...args }, opts), + ...Serialize.search.nearIMU({ imu: media }, opts), generative, }); break; case 'thermal': send = (media, generative) => search.withNearThermal({ - ...Serialize.search.nearThermal({ thermal: media, ...args }, opts), + ...Serialize.search.nearThermal({ thermal: media }, opts), generative, }); break; case 'video': send = (media, generative) => search.withNearVideo({ - ...Serialize.search.nearVideo({ video: media, ...args }), + ...Serialize.search.nearVideo({ video: media }), generative, }); break; diff --git a/src/collections/index.ts b/src/collections/index.ts index 3e344691..231e30fd 
100644 --- a/src/collections/index.ts +++ b/src/collections/index.ts @@ -1,19 +1,10 @@ import Connection from '../connection/grpc.js'; -import { WeaviateUnsupportedFeatureError } from '../errors.js'; import { WeaviateClass } from '../openapi/types.js'; import ClassExists from '../schema/classExists.js'; import { ClassCreator, ClassDeleter, ClassGetter, SchemaGetter } from '../schema/index.js'; import { DbVersionSupport } from '../utils/dbVersion.js'; import collection, { Collection } from './collection/index.js'; -import { - classToCollection, - makeVectorsConfig, - parseVectorIndex, - parseVectorizerConfig, - resolveProperty, - resolveReference, -} from './config/utils.js'; -import { configGuards } from './index.js'; +import { classToCollection, makeVectorsConfig, resolveProperty, resolveReference } from './config/utils.js'; import { CollectionConfig, GenerativeConfig, @@ -28,12 +19,9 @@ import { Reranker, RerankerConfig, ShardingConfigCreate, - VectorConfigCreate, - Vectorizer, VectorizersConfigCreate, Vectors, } from './types/index.js'; -import { PrimitiveKeys } from './types/internal.js'; /** * All the options available when creating a new collection. @@ -80,10 +68,6 @@ const collections = (connection: Connection, dbVersionSupport: DbVersionSupport) >(config: CollectionConfigCreate) { const { name, invertedIndex, multiTenancy, replication, sharding, ...rest } = config; - const supportsDynamicVectorIndex = await dbVersionSupport.supportsDynamicVectorIndex(); - const supportsNamedVectors = await dbVersionSupport.supportsNamedVectors(); - const supportsHNSWAndBQ = await dbVersionSupport.supportsHNSWAndBQ(); - const moduleConfig: any = {}; if (config.generative) { const generative = @@ -94,42 +78,7 @@ const collections = (connection: Connection, dbVersionSupport: DbVersionSupport) moduleConfig[config.reranker.name] = config.reranker.config ? 
config.reranker.config : {}; } - const makeLegacyVectorizer = ( - configVectorizers: VectorConfigCreate, undefined, string, Vectorizer> - ) => { - const vectorizer = - configVectorizers.vectorizer.name === 'text2vec-azure-openai' - ? 'text2vec-openai' - : configVectorizers.vectorizer.name; - const moduleConfig: any = {}; - moduleConfig[vectorizer] = parseVectorizerConfig(configVectorizers.vectorizer.config); - - const vectorIndexConfig = parseVectorIndex(configVectorizers.vectorIndex); - const vectorIndexType = configVectorizers.vectorIndex.name; - - if ( - vectorIndexType === 'hnsw' && - configVectorizers.vectorIndex.config !== undefined && - configGuards.quantizer.isBQ(configVectorizers.vectorIndex.config.quantizer as any) - ) { - if (!supportsHNSWAndBQ.supports) { - throw new WeaviateUnsupportedFeatureError(supportsHNSWAndBQ.message); - } - } - - if (vectorIndexType === 'dynamic' && !supportsDynamicVectorIndex.supports) { - throw new WeaviateUnsupportedFeatureError(supportsDynamicVectorIndex.message); - } - - return { - vectorizer, - moduleConfig, - vectorIndexConfig, - vectorIndexType, - }; - }; - - let schema: any = { + const schema: any = { ...rest, class: name, invertedIndexConfig: invertedIndex, @@ -138,39 +87,12 @@ const collections = (connection: Connection, dbVersionSupport: DbVersionSupport) replicationConfig: replication, shardingConfig: sharding, }; - let vectorizers: string[] = []; - if (supportsNamedVectors.supports) { - const { vectorsConfig, vectorizers: vecs } = config.vectorizers - ? makeVectorsConfig(config.vectorizers, supportsDynamicVectorIndex) - : { vectorsConfig: undefined, vectorizers: [] }; - schema.vectorConfig = vectorsConfig; - vectorizers = [...vecs]; - } else { - if (config.vectorizers !== undefined && Array.isArray(config.vectorizers)) { - throw new WeaviateUnsupportedFeatureError(supportsNamedVectors.message); - } - const configs = config.vectorizers - ? 
makeLegacyVectorizer({ ...config.vectorizers, name: undefined }) - : { - vectorizer: undefined, - moduleConfig: undefined, - vectorIndexConfig: undefined, - vectorIndexType: undefined, - }; - schema = { - ...schema, - moduleConfig: { - ...schema.moduleConfig, - ...configs.moduleConfig, - }, - vectorizer: configs.vectorizer, - vectorIndexConfig: configs.vectorIndexConfig, - vectorIndexType: configs.vectorIndexType, - }; - if (configs.vectorizer !== undefined) { - vectorizers = [configs.vectorizer]; - } - } + + const { vectorsConfig, vectorizers } = config.vectorizers + ? makeVectorsConfig(config.vectorizers) + : { vectorsConfig: undefined, vectorizers: [] }; + schema.vectorConfig = vectorsConfig; + const properties = config.properties ? config.properties.map((prop) => resolveProperty(prop as any, vectorizers)) : []; diff --git a/src/collections/query/check.ts b/src/collections/query/check.ts index 8cc1230f..0b01947d 100644 --- a/src/collections/query/check.ts +++ b/src/collections/query/check.ts @@ -2,19 +2,14 @@ import Connection from '../../connection/grpc.js'; import { WeaviateUnsupportedFeatureError } from '../../errors.js'; import { ConsistencyLevel } from '../../index.js'; import { DbVersionSupport } from '../../utils/dbVersion.js'; -import { GenerativeConfigRuntime, GroupByOptions } from '../index.js'; +import { GenerativeConfigRuntime } from '../index.js'; import { Serialize } from '../serialize/index.js'; import { - BaseBm25Options, BaseHybridOptions, BaseNearOptions, - FetchObjectByIdOptions, - FetchObjectsOptions, HybridNearTextSubSearch, HybridNearVectorSubSearch, - HybridOptions, NearVectorInputType, - SearchOptions, } from './types.js'; export class Check { @@ -40,66 +35,6 @@ export class Check { private getSearcher = () => this.connection.search(this.name, this.consistencyLevel, this.tenant); - private checkSupportForNamedVectors = async (opts?: BaseNearOptions) => { - if (!Serialize.isNamedVectors(opts)) return; - const check = await 
this.dbVersionSupport.supportsNamedVectors(); - if (!check.supports) throw new WeaviateUnsupportedFeatureError(check.message); - }; - - private checkSupportForBm25AndHybridGroupByQueries = async ( - query: 'Bm25' | 'Hybrid', - opts?: SearchOptions | GroupByOptions - ) => { - if (!Serialize.search.isGroupBy(opts)) return; - const check = await this.dbVersionSupport.supportsBm25AndHybridGroupByQueries(); - if (!check.supports) throw new WeaviateUnsupportedFeatureError(check.message(query)); - }; - - private checkSupportForHybridNearTextAndNearVectorSubSearches = async ( - opts?: HybridOptions - ) => { - if (opts?.vector === undefined || Array.isArray(opts.vector)) return; - const check = await this.dbVersionSupport.supportsHybridNearTextAndNearVectorSubsearchQueries(); - if (!check.supports) throw new WeaviateUnsupportedFeatureError(check.message); - }; - - private checkSupportForMultiTargetSearch = async (opts?: BaseNearOptions) => { - if (!Serialize.isMultiTarget(opts)) return false; - const check = await this.dbVersionSupport.supportsMultiTargetVectorSearch(); - if (!check.supports) throw new WeaviateUnsupportedFeatureError(check.message); - return check.supports; - }; - - private checkSupportForMultiVectorSearch = async ( - vec?: NearVectorInputType | HybridNearVectorSubSearch | HybridNearTextSubSearch - ) => { - if (vec === undefined || Serialize.isHybridNearTextSearch(vec)) return false; - if (Serialize.isHybridNearVectorSearch(vec) && !Serialize.isMultiVector(vec.vector)) return false; - if (Serialize.isHybridVectorSearch(vec) && !Serialize.isMultiVector(vec)) return false; - const check = await this.dbVersionSupport.supportsMultiVectorSearch(); - if (!check.supports) throw new WeaviateUnsupportedFeatureError(check.message); - return check.supports; - }; - - private checkSupportForMultiWeightPerTargetSearch = async (opts?: BaseNearOptions) => { - if (!Serialize.isMultiWeightPerTarget(opts)) return false; - const check = await 
this.dbVersionSupport.supportsMultiWeightsPerTargetSearch(); - if (!check.supports) throw new WeaviateUnsupportedFeatureError(check.message); - return check.supports; - }; - - private checkSupportForMultiVectorPerTargetSearch = async ( - vec?: NearVectorInputType | HybridNearVectorSubSearch | HybridNearTextSubSearch - ) => { - if (vec === undefined || Serialize.isHybridNearTextSearch(vec)) return false; - if (Serialize.isHybridNearVectorSearch(vec) && !Serialize.isMultiVectorPerTarget(vec.vector)) - return false; - if (Serialize.isHybridVectorSearch(vec) && !Serialize.isMultiVectorPerTarget(vec)) return false; - const check = await this.dbVersionSupport.supportsMultiVectorPerTargetSearch(); - if (!check.supports) throw new WeaviateUnsupportedFeatureError(check.message); - return check.supports; - }; - private checkSupportForVectors = async ( vec?: NearVectorInputType | HybridNearVectorSubSearch | HybridNearTextSubSearch ) => { @@ -121,45 +56,14 @@ export class Check { return check.supports; }; - public nearSearch = (opts?: BaseNearOptions) => { - return Promise.all([ - this.getSearcher(), - this.checkSupportForMultiTargetSearch(opts), - this.checkSupportForMultiWeightPerTargetSearch(opts), - this.checkSupportForNamedVectors(opts), - ]).then(([search, supportsTargets, supportsWeightsForTargets]) => { - const is126 = supportsTargets; - const is127 = supportsWeightsForTargets; - return { search, supportsTargets: is126 || is127, supportsWeightsForTargets: is127 }; - }); - }; + public nearSearch = () => this.getSearcher().then((search) => ({ search })); public nearVector = (vec: NearVectorInputType, opts?: BaseNearOptions) => { - return Promise.all([ - this.getSearcher(), - this.checkSupportForMultiTargetSearch(opts), - this.checkSupportForMultiVectorSearch(vec), - this.checkSupportForMultiVectorPerTargetSearch(vec), - this.checkSupportForMultiWeightPerTargetSearch(opts), - this.checkSupportForVectors(vec), - this.checkSupportForNamedVectors(opts), - ]).then( - ([ - 
search, - supportsMultiTarget, - supportsMultiVector, - supportsVectorsForTargets, - supportsWeightsForTargets, - supportsVectors, - ]) => { - const is126 = supportsMultiTarget || supportsMultiVector; - const is127 = supportsVectorsForTargets || supportsWeightsForTargets; + return Promise.all([this.getSearcher(), this.checkSupportForVectors(vec)]).then( + ([search, supportsVectors]) => { const is129 = supportsVectors; return { search, - supportsTargets: is126 || is127, - supportsVectorsForTargets: is127, - supportsWeightsForTargets: is127, supportsVectors: is129, }; } @@ -167,58 +71,20 @@ export class Check { }; public hybridSearch = (opts?: BaseHybridOptions) => { - return Promise.all([ - this.getSearcher(), - this.checkSupportForMultiTargetSearch(opts), - this.checkSupportForMultiVectorSearch(opts?.vector), - this.checkSupportForMultiVectorPerTargetSearch(opts?.vector), - this.checkSupportForMultiWeightPerTargetSearch(opts), - this.checkSupportForVectors(opts?.vector), - this.checkSupportForNamedVectors(opts), - this.checkSupportForBm25AndHybridGroupByQueries('Hybrid', opts), - this.checkSupportForHybridNearTextAndNearVectorSubSearches(opts), - ]).then( - ([ - search, - supportsMultiTarget, - supportsMultiVector, - supportsWeightsForTargets, - supportsVectorsForTargets, - supportsVectors, - ]) => { - const is126 = supportsMultiTarget || supportsMultiVector; - const is127 = supportsVectorsForTargets || supportsWeightsForTargets; + return Promise.all([this.getSearcher(), this.checkSupportForVectors(opts?.vector)]).then( + ([search, supportsVectors]) => { const is129 = supportsVectors; return { search, - supportsTargets: is126 || is127, - supportsWeightsForTargets: is127, - supportsVectorsForTargets: is127, supportsVectors: is129, }; } ); }; - public fetchObjects = (opts?: FetchObjectsOptions) => { - return Promise.all([this.getSearcher(), this.checkSupportForNamedVectors(opts)]).then(([search]) => { - return { search }; - }); - }; + public fetchObjects = () => 
this.getSearcher().then((search) => ({ search })); - public fetchObjectById = (opts?: FetchObjectByIdOptions) => { - return Promise.all([this.getSearcher(), this.checkSupportForNamedVectors(opts)]).then(([search]) => { - return { search }; - }); - }; + public fetchObjectById = () => this.getSearcher().then((search) => ({ search })); - public bm25 = (opts?: BaseBm25Options) => { - return Promise.all([ - this.getSearcher(), - this.checkSupportForNamedVectors(opts), - this.checkSupportForBm25AndHybridGroupByQueries('Bm25', opts), - ]).then(([search]) => { - return { search }; - }); - }; + public bm25 = () => this.getSearcher().then((search) => ({ search })); } diff --git a/src/collections/query/index.ts b/src/collections/query/index.ts index d288f0af..474bfb36 100644 --- a/src/collections/query/index.ts +++ b/src/collections/query/index.ts @@ -80,7 +80,7 @@ class QueryManager implements Query { opts?: FetchObjectByIdOptions ): Promise | null> { return this.check - .fetchObjectById(opts) + .fetchObjectById() .then(({ search }) => search.withFetch(Serialize.search.fetchObjectById({ id, ...opts }))) .then((reply) => this.parseReply(reply)) .then((ret) => (ret.objects.length === 1 ? 
ret.objects[0] : null)); @@ -90,7 +90,7 @@ class QueryManager implements Query { opts?: FetchObjectsOptions ): Promise> { return this.check - .fetchObjects(opts) + .fetchObjects() .then(({ search }) => search.withFetch(Serialize.search.fetchObjects(opts))) .then((reply) => this.parseReply(reply)); } @@ -108,7 +108,7 @@ class QueryManager implements Query { opts?: Bm25Options ): QueryReturn { return this.check - .bm25(opts) + .bm25() .then(({ search }) => search.withBm25(Serialize.search.bm25(query, opts))) .then((reply) => this.parseGroupByReply(opts, reply)); } @@ -127,27 +127,16 @@ class QueryManager implements Query { ): QueryReturn { return this.check .hybridSearch(opts) - .then( - async ({ - search, - supportsTargets, - supportsWeightsForTargets, - supportsVectorsForTargets, - supportsVectors, - }) => ({ - search, - args: await Serialize.search.hybrid( - { - query, - supportsTargets, - supportsWeightsForTargets, - supportsVectorsForTargets, - supportsVectors, - }, - opts - ), - }) - ) + .then(async ({ search, supportsVectors }) => ({ + search, + args: await Serialize.search.hybrid( + { + query, + supportsVectors, + }, + opts + ), + })) .then(({ search, args }) => search.withHybrid(args)) .then((reply) => this.parseGroupByReply(opts, reply)); } @@ -165,15 +154,13 @@ class QueryManager implements Query { opts?: NearOptions ): QueryReturn { return this.check - .nearSearch(opts) - .then(({ search, supportsTargets, supportsWeightsForTargets }) => { + .nearSearch() + .then(({ search }) => { return toBase64FromMedia(image).then((image) => ({ search, args: Serialize.search.nearImage( { image, - supportsTargets, - supportsWeightsForTargets, }, opts ), @@ -199,35 +186,27 @@ class QueryManager implements Query { opts?: NearOptions ): QueryReturn { return this.check - .nearSearch(opts) - .then(({ search, supportsTargets, supportsWeightsForTargets }) => { - const args = { - supportsTargets, - supportsWeightsForTargets, - }; + .nearSearch() + .then(({ search }) => { let 
send: (media: string) => Promise; switch (type) { case 'audio': - send = (media) => - search.withNearAudio(Serialize.search.nearAudio({ audio: media, ...args }, opts)); + send = (media) => search.withNearAudio(Serialize.search.nearAudio({ audio: media }, opts)); break; case 'depth': - send = (media) => - search.withNearDepth(Serialize.search.nearDepth({ depth: media, ...args }, opts)); + send = (media) => search.withNearDepth(Serialize.search.nearDepth({ depth: media }, opts)); break; case 'image': - send = (media) => - search.withNearImage(Serialize.search.nearImage({ image: media, ...args }, opts)); + send = (media) => search.withNearImage(Serialize.search.nearImage({ image: media }, opts)); break; case 'imu': - send = (media) => search.withNearIMU(Serialize.search.nearIMU({ imu: media, ...args }, opts)); + send = (media) => search.withNearIMU(Serialize.search.nearIMU({ imu: media }, opts)); break; case 'thermal': - send = (media) => - search.withNearThermal(Serialize.search.nearThermal({ thermal: media, ...args }, opts)); + send = (media) => search.withNearThermal(Serialize.search.nearThermal({ thermal: media }, opts)); break; case 'video': - send = (media) => search.withNearVideo(Serialize.search.nearVideo({ video: media, ...args })); + send = (media) => search.withNearVideo(Serialize.search.nearVideo({ video: media })); break; default: throw new WeaviateInvalidInputError(`Invalid media type: ${type}`); @@ -250,14 +229,12 @@ class QueryManager implements Query { opts?: NearOptions ): QueryReturn { return this.check - .nearSearch(opts) - .then(({ search, supportsTargets, supportsWeightsForTargets }) => ({ + .nearSearch() + .then(({ search }) => ({ search, args: Serialize.search.nearObject( { id, - supportsTargets, - supportsWeightsForTargets, }, opts ), @@ -279,14 +256,12 @@ class QueryManager implements Query { opts?: NearTextOptions ): QueryReturn { return this.check - .nearSearch(opts) - .then(({ search, supportsTargets, supportsWeightsForTargets }) => ({ + 
.nearSearch() + .then(({ search }) => ({ search, args: Serialize.search.nearText( { query, - supportsTargets, - supportsWeightsForTargets, }, opts ), @@ -309,27 +284,16 @@ class QueryManager implements Query { ): QueryReturn { return this.check .nearVector(vector, opts) - .then( - async ({ - search, - supportsTargets, - supportsVectorsForTargets, - supportsWeightsForTargets, - supportsVectors, - }) => ({ - search, - args: await Serialize.search.nearVector( - { - vector, - supportsTargets, - supportsVectorsForTargets, - supportsWeightsForTargets, - supportsVectors, - }, - opts - ), - }) - ) + .then(async ({ search, supportsVectors }) => ({ + search, + args: await Serialize.search.nearVector( + { + vector, + supportsVectors, + }, + opts + ), + })) .then(({ search, args }) => search.withNearVector(args)) .then((reply) => this.parseGroupByReply(opts, reply)); } diff --git a/src/collections/serialize/index.ts b/src/collections/serialize/index.ts index d9df1847..b30d8c72 100644 --- a/src/collections/serialize/index.ts +++ b/src/collections/serialize/index.ts @@ -426,9 +426,6 @@ class Aggregate { objectLimit: opts?.objectLimit, hybrid: await Serialize.hybridSearch({ query: query, - supportsTargets: true, - supportsVectorsForTargets: true, - supportsWeightsForTargets: true, supportsVectors: true, ...opts, }), @@ -444,8 +441,6 @@ class Aggregate { objectLimit: opts?.objectLimit, nearImage: Serialize.nearImageSearch({ image, - supportsTargets: true, - supportsWeightsForTargets: true, ...opts, }), }; @@ -460,8 +455,6 @@ class Aggregate { objectLimit: opts?.objectLimit, nearObject: Serialize.nearObjectSearch({ id, - supportsTargets: true, - supportsWeightsForTargets: true, ...opts, }), }; @@ -476,8 +469,6 @@ class Aggregate { objectLimit: opts?.objectLimit, nearText: Serialize.nearTextSearch({ query, - supportsTargets: true, - supportsWeightsForTargets: true, ...opts, }), }; @@ -492,9 +483,6 @@ class Aggregate { objectLimit: opts?.objectLimit, nearVector: await 
Serialize.nearVectorSearch({ vector, - supportsTargets: true, - supportsVectorsForTargets: true, - supportsWeightsForTargets: true, supportsVectors: true, ...opts, }), @@ -669,9 +657,6 @@ class Search { public static hybrid = async ( args: { query: string; - supportsTargets: boolean; - supportsVectorsForTargets: boolean; - supportsWeightsForTargets: boolean; supportsVectors: boolean; }, opts?: HybridOptions @@ -688,8 +673,6 @@ class Search { public static nearAudio = ( args: { audio: string; - supportsTargets: boolean; - supportsWeightsForTargets: boolean; }, opts?: NearOptions ): SearchNearAudioArgs => { @@ -703,8 +686,6 @@ class Search { public static nearDepth = ( args: { depth: string; - supportsTargets: boolean; - supportsWeightsForTargets: boolean; }, opts?: NearOptions ): SearchNearDepthArgs => { @@ -718,8 +699,6 @@ class Search { public static nearImage = ( args: { image: string; - supportsTargets: boolean; - supportsWeightsForTargets: boolean; }, opts?: NearOptions ): SearchNearImageArgs => { @@ -733,8 +712,6 @@ class Search { public static nearIMU = ( args: { imu: string; - supportsTargets: boolean; - supportsWeightsForTargets: boolean; }, opts?: NearOptions ): SearchNearIMUArgs => { @@ -746,7 +723,7 @@ class Search { }; public static nearObject = ( - args: { id: string; supportsTargets: boolean; supportsWeightsForTargets: boolean }, + args: { id: string }, opts?: NearOptions ): SearchNearObjectArgs => { return { @@ -759,8 +736,6 @@ class Search { public static nearText = ( args: { query: string | string[]; - supportsTargets: boolean; - supportsWeightsForTargets: boolean; }, opts?: NearTextOptions ): SearchNearTextArgs => { @@ -772,7 +747,7 @@ class Search { }; public static nearThermal = ( - args: { thermal: string; supportsTargets: boolean; supportsWeightsForTargets: boolean }, + args: { thermal: string }, opts?: NearOptions ): SearchNearThermalArgs => { return { @@ -785,9 +760,6 @@ class Search { public static nearVector = async ( args: { vector: 
NearVectorInputType; - supportsTargets: boolean; - supportsVectorsForTargets: boolean; - supportsWeightsForTargets: boolean; supportsVectors: boolean; }, opts?: NearOptions @@ -799,7 +771,7 @@ class Search { }; }; public static nearVideo = ( - args: { video: string; supportsTargets: boolean; supportsWeightsForTargets: boolean }, + args: { video: string }, opts?: NearOptions ): SearchNearVideoArgs => { return { @@ -1026,11 +998,9 @@ export class Serialize { }; private static hybridVector = async (args: { - supportsTargets: boolean; - supportsVectorsForTargets: boolean; - supportsWeightsForTargets: boolean; supportsVectors: boolean; vector?: BaseHybridOptions['vector']; + targetVector?: BaseHybridOptions['targetVector']; }) => { const vector = args.vector; if (Serialize.isHybridVectorSearch(vector)) { @@ -1095,9 +1065,6 @@ export class Serialize { public static hybridSearch = async ( args: { query: string; - supportsTargets: boolean; - supportsVectorsForTargets: boolean; - supportsWeightsForTargets: boolean; supportsVectors: boolean; } & HybridSearchOptions ): Promise => { @@ -1130,11 +1097,7 @@ export class Serialize { }; public static nearAudioSearch = ( - args: { audio: string; supportsTargets: boolean; supportsWeightsForTargets: boolean } & NearOptions< - T, - V, - I - > + args: { audio: string } & NearOptions ): NearAudioSearch => { const { targets, targetVectors } = Serialize.targetVector(args); return NearAudioSearch.fromPartial({ @@ -1147,11 +1110,7 @@ export class Serialize { }; public static nearDepthSearch = ( - args: { depth: string; supportsTargets: boolean; supportsWeightsForTargets: boolean } & NearOptions< - T, - V, - I - > + args: { depth: string } & NearOptions ): NearDepthSearch => { const { targets, targetVectors } = Serialize.targetVector(args); return NearDepthSearch.fromPartial({ @@ -1164,11 +1123,7 @@ export class Serialize { }; public static nearImageSearch = ( - args: { image: string; supportsTargets: boolean; supportsWeightsForTargets: 
boolean } & NearOptions< - T, - V, - I - > + args: { image: string } & NearOptions ): NearImageSearch => { const { targets, targetVectors } = Serialize.targetVector(args); return NearImageSearch.fromPartial({ @@ -1180,9 +1135,7 @@ export class Serialize { }); }; - public static nearIMUSearch = ( - args: { imu: string; supportsTargets: boolean; supportsWeightsForTargets: boolean } & NearOptions - ): NearIMUSearch => { + public static nearIMUSearch = (args: { imu: string } & NearOptions): NearIMUSearch => { const { targets, targetVectors } = Serialize.targetVector(args); return NearIMUSearch.fromPartial({ imu: args.imu, @@ -1193,9 +1146,7 @@ export class Serialize { }); }; - public static nearObjectSearch = ( - args: { id: string; supportsTargets: boolean; supportsWeightsForTargets: boolean } & NearOptions - ): NearObject => { + public static nearObjectSearch = (args: { id: string } & NearOptions): NearObject => { const { targets, targetVectors } = Serialize.targetVector(args); return NearObject.fromPartial({ id: args.id, @@ -1208,8 +1159,6 @@ export class Serialize { public static nearTextSearch = (args: { query: string | string[]; - supportsTargets: boolean; - supportsWeightsForTargets: boolean; targetVector?: TargetVectorInputType; certainty?: number; distance?: number; @@ -1241,11 +1190,7 @@ export class Serialize { }; public static nearThermalSearch = ( - args: { thermal: string; supportsTargets: boolean; supportsWeightsForTargets: boolean } & NearOptions< - T, - V, - I - > + args: { thermal: string } & NearOptions ): NearThermalSearch => { const { targets, targetVectors } = Serialize.targetVector(args); return NearThermalSearch.fromPartial({ @@ -1301,9 +1246,6 @@ export class Serialize { public static nearVectorSearch = async (args: { vector: NearVectorInputType; - supportsTargets: boolean; - supportsVectorsForTargets: boolean; - supportsWeightsForTargets: boolean; supportsVectors: boolean; certainty?: number; distance?: number; @@ -1318,38 +1260,29 @@ export 
class Serialize { })), }); - public static targetVector = (args: { - supportsTargets: boolean; - supportsWeightsForTargets: boolean; + public static targetVector = (args?: { targetVector?: TargetVectorInputType; }): { targets?: Targets; targetVectors?: string[] } => { - if (args.targetVector === undefined) { + if (args?.targetVector === undefined) { return {}; } else if (TargetVectorInputGuards.isSingle(args.targetVector)) { - return args.supportsTargets - ? { - targets: Targets.fromPartial({ - targetVectors: [args.targetVector], - }), - } - : { targetVectors: [args.targetVector] }; + return { + targets: Targets.fromPartial({ + targetVectors: [args.targetVector], + }), + }; } else if (TargetVectorInputGuards.isMulti(args.targetVector)) { - return args.supportsTargets - ? { - targets: Targets.fromPartial({ - targetVectors: args.targetVector, - }), - } - : { targetVectors: args.targetVector }; + return { + targets: Targets.fromPartial({ + targetVectors: args.targetVector, + }), + }; } else { - return { targets: Serialize.targets(args.targetVector, args.supportsWeightsForTargets) }; + return { targets: Serialize.targets(args.targetVector) }; } }; static vectors = async (args: { - supportsTargets: boolean; - supportsVectorsForTargets: boolean; - supportsWeightsForTargets: boolean; supportsVectors: boolean; argumentName: 'nearVector' | 'vector'; targetVector?: TargetVectorInputType; @@ -1375,40 +1308,6 @@ export class Serialize { if (Object.keys(args.vector).length === 0) { throw invalidVectorError; } - if (!args.supportsVectorsForTargets) { - const vectorPerTarget: Record = {}; - Object.entries(args.vector).forEach(([k, v]) => { - if (ArrayInputGuards.is2DArray(v)) { - throw new WeaviateUnsupportedFeatureError('Multi-vectors are not supported in Weaviate <1.29.0'); - } - if (NearVectorInputGuards.isListOf1D(v) || NearVectorInputGuards.isListOf2D(v)) { - throw new WeaviateUnsupportedFeatureError( - 'Lists of vectors are not supported in Weaviate <1.29.0' - ); - } - 
vectorPerTarget[k] = Serialize.vectorToBytes(v); - }); - if (args.targetVector !== undefined) { - const { targets, targetVectors } = Serialize.targetVector(args); - return { - targetVectors, - targets, - vectorPerTarget, - }; - } else { - return args.supportsTargets - ? { - targets: Targets.fromPartial({ - targetVectors: Object.keys(vectorPerTarget), - }), - vectorPerTarget, - } - : { - targetVectors: Object.keys(vectorPerTarget), - vectorPerTarget, - }; - } - } const vectorForTargets: VectorForTarget[] = []; for (const [target, vector] of Object.entries(args.vector)) { if (!args.supportsVectors) { @@ -1521,12 +1420,10 @@ export class Serialize { }; private static targets = ( - targets: MultiTargetVectorJoin, - supportsWeightsForTargets: boolean + targets: MultiTargetVectorJoin ): { combination: CombinationMethod; targetVectors: string[]; - weights: Record; weightsForTargets: WeightsForTarget[]; } => { let combination: CombinationMethod; @@ -1549,7 +1446,7 @@ export class Serialize { default: throw new Error('Invalid combination method'); } - if (targets.weights !== undefined && supportsWeightsForTargets) { + if (targets.weights !== undefined) { const weightsForTargets: WeightsForTarget[] = Object.entries(targets.weights) .map(([target, weight]) => { return { @@ -1565,37 +1462,19 @@ export class Serialize { return { combination, targetVectors: weightsForTargets.map((w) => w.target), - weights: {}, weightsForTargets, }; - } else if (targets.weights !== undefined && !supportsWeightsForTargets) { - if (Object.values(targets.weights).some((v) => Array.isArray(v))) { - throw new WeaviateUnsupportedFeatureError( - 'Multiple weights per target are not supported in this Weaviate version. Please upgrade to at least Weaviate 1.27.0.' 
- ); - } - return { - combination, - targetVectors: targets.targetVectors, - weights: targets.weights as Record, // TS can't type narrow the complicated .some predicate above - weightsForTargets: [], - }; } else { return { combination, targetVectors: targets.targetVectors, - weights: {}, weightsForTargets: [], }; } }; public static nearVideoSearch = ( - args: { video: string; supportsTargets: boolean; supportsWeightsForTargets: boolean } & NearOptions< - T, - V, - I - > + args: { video: string } & NearOptions ): NearVideoSearch => { const { targets, targetVectors } = Serialize.targetVector(args); return NearVideoSearch.fromPartial({ diff --git a/src/collections/serialize/unit.test.ts b/src/collections/serialize/unit.test.ts index e6f12764..669c34e0 100644 --- a/src/collections/serialize/unit.test.ts +++ b/src/collections/serialize/unit.test.ts @@ -147,9 +147,6 @@ describe('Unit testing of Serialize', () => { const args = await Serialize.search.hybrid( { query: 'test', - supportsTargets: false, - supportsVectorsForTargets: false, - supportsWeightsForTargets: false, supportsVectors: false, }, { @@ -167,7 +164,9 @@ describe('Unit testing of Serialize', () => { properties: ['name'], alpha: 0.6, vectorBytes: new Uint8Array(new Float32Array([1, 2, 3]).buffer), - targetVectors: ['title'], + targets: { + targetVectors: ['title'], + }, fusionType: Hybrid_FusionType.FUSION_TYPE_RANKED, vectorDistance: 0.4, }), @@ -179,9 +178,6 @@ describe('Unit testing of Serialize', () => { const args = await Serialize.search.hybrid( { query: 'test', - supportsTargets: true, - supportsVectorsForTargets: true, - supportsWeightsForTargets: true, supportsVectors: true, }, { @@ -218,9 +214,6 @@ describe('Unit testing of Serialize', () => { const args = await Serialize.search.hybrid( { query: 'test', - supportsTargets: true, - supportsVectorsForTargets: true, - supportsWeightsForTargets: true, supportsVectors: false, }, { @@ -277,8 +270,6 @@ describe('Unit testing of Serialize', () => { const 
args = Serialize.search.nearAudio( { audio: 'audio', - supportsTargets: false, - supportsWeightsForTargets: false, }, { certainty: 0.6, @@ -291,7 +282,9 @@ describe('Unit testing of Serialize', () => { audio: 'audio', certainty: 0.6, distance: 0.4, - targetVectors: ['audio'], + targets: { + targetVectors: ['audio'], + }, }), metadata: MetadataRequest.fromPartial({ uuid: true }), }); @@ -300,8 +293,6 @@ describe('Unit testing of Serialize', () => { it('should parse args for nearDepth', () => { const args = Serialize.search.nearDepth({ depth: 'depth', - supportsTargets: false, - supportsWeightsForTargets: false, }); expect(args).toEqual({ nearDepth: NearDepthSearch.fromPartial({ @@ -314,8 +305,6 @@ describe('Unit testing of Serialize', () => { it('should parse args for nearIMU', () => { const args = Serialize.search.nearIMU({ imu: 'imu', - supportsTargets: false, - supportsWeightsForTargets: false, }); expect(args).toEqual({ nearIMU: NearIMUSearch.fromPartial({ @@ -328,8 +317,6 @@ describe('Unit testing of Serialize', () => { it('should parse args for nearImage', () => { const args = Serialize.search.nearImage({ image: 'image', - supportsTargets: false, - supportsWeightsForTargets: false, }); expect(args).toEqual({ nearImage: NearImageSearch.fromPartial({ @@ -342,8 +329,6 @@ describe('Unit testing of Serialize', () => { it('should parse args for nearObject', () => { const args = Serialize.search.nearObject({ id: 'id', - supportsTargets: false, - supportsWeightsForTargets: false, }); expect(args).toEqual({ nearObject: NearObject.fromPartial({ @@ -357,8 +342,6 @@ describe('Unit testing of Serialize', () => { const args = Serialize.search.nearText( { query: 'test', - supportsTargets: false, - supportsWeightsForTargets: false, }, { moveAway: { @@ -394,8 +377,6 @@ describe('Unit testing of Serialize', () => { it('should parse args for nearThermal', () => { const args = Serialize.search.nearThermal({ thermal: 'thermal', - supportsTargets: false, - 
supportsWeightsForTargets: false, }); expect(args).toEqual({ nearThermal: NearThermalSearch.fromPartial({ @@ -408,9 +389,6 @@ describe('Unit testing of Serialize', () => { it('should parse args for nearVector with single vector <1.29', async () => { const args = await Serialize.search.nearVector({ vector: [1, 2, 3], - supportsTargets: false, - supportsVectorsForTargets: false, - supportsWeightsForTargets: false, supportsVectors: false, }); expect(args).toEqual({ @@ -424,9 +402,6 @@ describe('Unit testing of Serialize', () => { it('should parse args for nearVector with single vector >=1.29', async () => { const args = await Serialize.search.nearVector({ vector: [1, 2, 3], - supportsTargets: false, - supportsVectorsForTargets: false, - supportsWeightsForTargets: false, supportsVectors: true, }); expect(args).toEqual({ @@ -442,29 +417,6 @@ describe('Unit testing of Serialize', () => { }); }); - it('should parse args for nearVector with two named vectors and supportsTargets (<1.27.0)', async () => { - const args = await Serialize.search.nearVector({ - vector: { - a: [1, 2, 3], - b: [4, 5, 6], - }, - supportsTargets: true, - supportsVectorsForTargets: false, - supportsWeightsForTargets: false, - supportsVectors: false, - }); - expect(args).toEqual({ - nearVector: NearVector.fromPartial({ - vectorPerTarget: { - a: new Uint8Array(new Float32Array([1, 2, 3]).buffer), - b: new Uint8Array(new Float32Array([4, 5, 6]).buffer), - }, - targets: { targetVectors: ['a', 'b'] }, - }), - metadata: MetadataRequest.fromPartial({ uuid: true }), - }); - }); - it('should parse args for nearVector with two named vectors and all supports (==1.27.x)', async () => { const args = await Serialize.search.nearVector({ vector: { @@ -474,9 +426,6 @@ describe('Unit testing of Serialize', () => { ], b: [7, 8, 9], }, - supportsTargets: true, - supportsVectorsForTargets: true, - supportsWeightsForTargets: true, supportsVectors: false, }); expect(args).toEqual({ @@ -495,8 +444,6 @@ describe('Unit 
testing of Serialize', () => { it('should parse args for nearVideo', () => { const args = Serialize.search.nearVideo({ video: 'video', - supportsTargets: false, - supportsWeightsForTargets: false, }); expect(args).toEqual({ nearVideo: NearVideoSearch.fromPartial({ @@ -725,16 +672,12 @@ describe('Unit testing of Serialize', () => { type Test = { name: string; targetVector: TargetVectorInputType; - supportsTargets: boolean; - supportsWeightsForTargets: boolean; out: Out; }; const tests: Test[] = [ { name: 'should parse single target vector into targets', targetVector: 'a', - supportsTargets: true, - supportsWeightsForTargets: false, out: { targets: Targets.fromPartial({ targetVectors: ['a'] }), }, @@ -742,8 +685,6 @@ describe('Unit testing of Serialize', () => { { name: 'should parse list of target vectors into targets', targetVector: ['a', 'b'], - supportsTargets: true, - supportsWeightsForTargets: false, out: { targets: Targets.fromPartial({ targetVectors: ['a', 'b'] }), }, @@ -751,8 +692,6 @@ describe('Unit testing of Serialize', () => { { name: 'should parse MultiTargetJoin sum', targetVector: multiTargetVector().average(['a', 'b']), - supportsTargets: true, - supportsWeightsForTargets: false, out: { targets: Targets.fromPartial({ combination: CombinationMethod.COMBINATION_METHOD_TYPE_AVERAGE, @@ -760,24 +699,9 @@ describe('Unit testing of Serialize', () => { }), }, }, - { - name: 'should parse MultiTargetJoin manual single weight per target', - targetVector: multiTargetVector().manualWeights({ a: 0.5, b: 0.5 }), - supportsTargets: true, - supportsWeightsForTargets: false, - out: { - targets: Targets.fromPartial({ - combination: CombinationMethod.COMBINATION_METHOD_TYPE_MANUAL, - targetVectors: ['a', 'b'], - weights: { a: 0.5, b: 0.5 }, - }), - }, - }, { name: 'should parse MultiTargetJoin manual multiple weights per target', targetVector: multiTargetVector().manualWeights({ a: [0.5, 0.5], b: 0.5 }), - supportsTargets: true, - supportsWeightsForTargets: true, 
out: { targets: Targets.fromPartial({ combination: CombinationMethod.COMBINATION_METHOD_TYPE_MANUAL, @@ -793,8 +717,6 @@ describe('Unit testing of Serialize', () => { { name: 'should parse MultiTargetJoin minimum', targetVector: multiTargetVector().minimum(['a', 'b']), - supportsTargets: true, - supportsWeightsForTargets: false, out: { targets: Targets.fromPartial({ combination: CombinationMethod.COMBINATION_METHOD_TYPE_MIN, @@ -805,8 +727,6 @@ describe('Unit testing of Serialize', () => { { name: 'should parse MultiTargetJoin sum', targetVector: multiTargetVector().average(['a', 'b']), - supportsTargets: true, - supportsWeightsForTargets: false, out: { targets: Targets.fromPartial({ combination: CombinationMethod.COMBINATION_METHOD_TYPE_AVERAGE, @@ -814,24 +734,9 @@ describe('Unit testing of Serialize', () => { }), }, }, - { - name: 'should parse MultiTargetJoin relativeScore single weight per target', - targetVector: multiTargetVector().relativeScore({ a: 0.5, b: 0.5 }), - supportsTargets: true, - supportsWeightsForTargets: false, - out: { - targets: Targets.fromPartial({ - combination: CombinationMethod.COMBINATION_METHOD_TYPE_RELATIVE_SCORE, - targetVectors: ['a', 'b'], - weights: { a: 0.5, b: 0.5 }, - }), - }, - }, { name: 'should parse MultiTargetJoin relativeScore multiple weights per target', targetVector: multiTargetVector().relativeScore({ a: [0.5, 0.5], b: 0.5 }), - supportsTargets: true, - supportsWeightsForTargets: true, out: { targets: Targets.fromPartial({ combination: CombinationMethod.COMBINATION_METHOD_TYPE_RELATIVE_SCORE, @@ -847,8 +752,6 @@ describe('Unit testing of Serialize', () => { { name: 'should parse MultiTargetJoin sum', targetVector: multiTargetVector().sum(['a', 'b']), - supportsTargets: true, - supportsWeightsForTargets: false, out: { targets: Targets.fromPartial({ combination: CombinationMethod.COMBINATION_METHOD_TYPE_SUM, diff --git a/src/collections/tenants/index.ts b/src/collections/tenants/index.ts index 12dd4ea0..7f455b28 100644 
--- a/src/collections/tenants/index.ts +++ b/src/collections/tenants/index.ts @@ -1,17 +1,12 @@ import { ConnectionGRPC } from '../../connection/index.js'; -import { WeaviateUnexpectedStatusCodeError, WeaviateUnsupportedFeatureError } from '../../errors.js'; +import { WeaviateUnexpectedStatusCodeError } from '../../errors.js'; import { Tenant as TenantREST } from '../../openapi/types.js'; -import { TenantsCreator, TenantsDeleter, TenantsGetter, TenantsUpdater } from '../../schema/index.js'; +import { TenantsCreator, TenantsDeleter, TenantsUpdater } from '../../schema/index.js'; import { DbVersionSupport } from '../../utils/dbVersion.js'; import { Deserialize } from '../deserialize/index.js'; import { Serialize } from '../serialize/index.js'; import { Tenant, TenantBC, TenantBase, TenantCreate, TenantUpdate } from './types.js'; -const checkSupportForGRPCTenantsGetEndpoint = async (dbVersionSupport: DbVersionSupport) => { - const check = await dbVersionSupport.supportsTenantsGetGRPCMethod(); - if (!check.supports) throw new WeaviateUnsupportedFeatureError(check.message); -}; - const parseValueOrValueArray = (value: V | V[]) => (Array.isArray(value) ? 
value : [value]); const parseStringOrTenant = (tenant: string | T) => @@ -27,20 +22,11 @@ const tenants = ( collection: string, dbVersionSupport: DbVersionSupport ): Tenants => { - const getGRPC = (names?: string[]) => - checkSupportForGRPCTenantsGetEndpoint(dbVersionSupport) - .then(() => connection.tenants(collection)) + const get = (names?: string[]) => + connection + .tenants(collection) .then((builder) => builder.withGet({ names })) .then(Deserialize.tenantsGet); - const getREST = () => - new TenantsGetter(connection, collection).do().then((tenants) => { - const result: Record = {}; - tenants.forEach((tenant) => { - if (!tenant.name) return; - result[tenant.name] = parseTenantREST(tenant); - }); - return result; - }); const update = async (tenants: TenantBC | TenantUpdate | (TenantBC | TenantUpdate)[]) => { const out: Tenant[] = []; for await (const res of Serialize.tenants(parseValueOrValueArray(tenants), Serialize.tenantUpdate).map( @@ -56,15 +42,12 @@ const tenants = ( new TenantsCreator(connection, collection, parseValueOrValueArray(tenants).map(Serialize.tenantCreate)) .do() .then((res) => res.map(parseTenantREST)), - get: async function () { - const check = await dbVersionSupport.supportsTenantsGetGRPCMethod(); - return check.supports ? getGRPC() : getREST(); - }, - getByNames: (tenants) => getGRPC(tenants.map(parseStringOrTenant)), + get: () => get(), + getByNames: (tenants) => get(tenants.map(parseStringOrTenant)), getByName: async (tenant) => { const tenantName = parseStringOrTenant(tenant); if (await dbVersionSupport.supportsTenantGetRESTMethod().then((check) => !check.supports)) { - return getGRPC([tenantName]).then((tenants) => tenants[tenantName] ?? null); + return get([tenantName]).then((tenants) => tenants[tenantName] ?? 
null); } return connection .get(`/schema/${collection}/tenants/${tenantName}`) diff --git a/src/collections/tenants/unit.test.ts b/src/collections/tenants/unit.test.ts index 6b160bf1..fa01c8c8 100644 --- a/src/collections/tenants/unit.test.ts +++ b/src/collections/tenants/unit.test.ts @@ -77,7 +77,7 @@ describe('Mock testing of tenants.get() method with a REST server', () => { }; beforeAll(async () => { - servers = await makeMockServers('1.24.0', 8954, 'localhost:8955'); + servers = await makeMockServers('1.27.0', 8954, 'localhost:8955'); }); it('should get mocked tenants', async () => { @@ -103,7 +103,7 @@ describe('Mock testing of tenants.get() method with a gRPC server', () => { }; beforeAll(async () => { - servers = await makeMockServers('1.25.0', 8956, 'localhost:8957'); + servers = await makeMockServers('1.27.0', 8956, 'localhost:8957'); }); it('should get the mocked tenants', async () => { diff --git a/src/openapi/schema.ts b/src/openapi/schema.ts index 9908f40d..409fbfbe 100644 --- a/src/openapi/schema.ts +++ b/src/openapi/schema.ts @@ -110,6 +110,10 @@ export interface paths { '/authz/roles/{id}/user-assignments': { get: operations['getUsersForRole']; }; + '/authz/roles/{id}/group-assignments': { + /** Retrieves a list of all groups that have been assigned a specific role, identified by its name. */ + get: operations['getGroupsForRole']; + }; '/authz/users/{id}/roles': { get: operations['getRolesForUserDeprecated']; }; @@ -128,6 +132,14 @@ export interface paths { '/authz/groups/{id}/revoke': { post: operations['revokeRoleFromGroup']; }; + '/authz/groups/{id}/roles/{groupType}': { + /** Retrieves a list of all roles assigned to a specific group. The group must be identified by both its name (`id`) and its type (`db` or `oidc`). */ + get: operations['getRolesForGroup']; + }; + '/authz/groups/{groupType}': { + /** Retrieves a list of all available group names for a specified group type (`oidc` or `db`). 
*/ + get: operations['getGroups']; + }; '/objects': { /** Lists all Objects in reverse order of creation, owned by the user that belongs to the used token. */ get: operations['objects.list']; @@ -303,6 +315,11 @@ export interface definitions { * @enum {string} */ UserTypeInput: 'db' | 'oidc'; + /** + * @description If the group contains OIDC or database users. + * @enum {string} + */ + GroupType: 'db' | 'oidc'; /** * @description the type of user * @enum {string} @@ -399,6 +416,15 @@ export interface definitions { */ users?: string; }; + /** @description Resources applicable for group actions. */ + groups?: { + /** + * @description A string that specifies which groups this permission applies to. Can be an exact group name or a regex pattern. The default value `*` applies the permission to all groups. + * @default * + */ + group?: string; + groupType?: definitions['GroupType']; + }; /** @description resources applicable for tenant actions */ tenants?: { /** @@ -496,7 +522,9 @@ export interface definitions { | 'create_aliases' | 'read_aliases' | 'update_aliases' - | 'delete_aliases'; + | 'delete_aliases' + | 'assign_and_revoke_groups' + | 'read_groups'; }; /** @description list of roles */ RolesListResponse: definitions['Role'][]; @@ -1171,8 +1199,6 @@ export interface definitions { BackupListResponse: { /** @description The ID of the backup. Must be URL-safe and work as a filesystem path, only lowercase, numbers, underscore, minus characters allowed. */ id?: string; - /** @description destination path of backup files proper to selected backend */ - path?: string; /** @description The list of classes for which the existed backup process */ classes?: string[]; /** @@ -1191,6 +1217,8 @@ export interface definitions { exclude?: string[]; /** @description Allows overriding the node names stored in the backup with different ones. Useful when restoring backups to a different environment. 
*/ node_mapping?: { [key: string]: string }; + /** @description Allows overwriting the collection alias if there is a conflict */ + overwriteAlias?: boolean; }; /** @description The definition of a backup restore response body */ BackupRestoreResponse: { @@ -1789,7 +1817,9 @@ export interface definitions { | 'WithinGeoRange' | 'IsNull' | 'ContainsAny' - | 'ContainsAll'; + | 'ContainsAll' + | 'ContainsNone' + | 'Not'; /** * @description path to the property currently being filtered * @example [ @@ -2827,6 +2857,42 @@ export interface operations { }; }; }; + /** Retrieves a list of all groups that have been assigned a specific role, identified by its name. */ + getGroupsForRole: { + parameters: { + path: { + /** The unique name of the role. */ + id: string; + }; + }; + responses: { + /** Successfully retrieved the list of groups that have the role assigned. */ + 200: { + schema: ({ + groupId?: string; + groupType: definitions['GroupType']; + } & { + name: unknown; + })[]; + }; + /** Bad request */ + 400: { + schema: definitions['ErrorResponse']; + }; + /** Unauthorized or invalid credentials. */ + 401: unknown; + /** Forbidden */ + 403: { + schema: definitions['ErrorResponse']; + }; + /** The specified role was not found. */ + 404: unknown; + /** An error has occurred while trying to fulfill the request. Most likely the ErrorResponse will contain more information about the error. 
*/ + 500: { + schema: definitions['ErrorResponse']; + }; + }; + }; getRolesForUserDeprecated: { parameters: { path: { @@ -2985,6 +3051,7 @@ export interface operations { body: { /** @description the roles that assigned to group */ roles?: string[]; + groupType?: definitions['GroupType']; }; }; }; @@ -3019,6 +3086,7 @@ export interface operations { body: { /** @description the roles that revoked from group */ roles?: string[]; + groupType?: definitions['GroupType']; }; }; }; @@ -3043,6 +3111,80 @@ export interface operations { }; }; }; + /** Retrieves a list of all roles assigned to a specific group. The group must be identified by both its name (`id`) and its type (`db` or `oidc`). */ + getRolesForGroup: { + parameters: { + path: { + /** The unique name of the group. */ + id: string; + /** The type of the group. */ + groupType: 'oidc'; + }; + query: { + /** If true, the response will include the full role definitions with all associated permissions. If false, only role names are returned. */ + includeFullRoles?: boolean; + }; + }; + responses: { + /** A list of roles assigned to the specified group. */ + 200: { + schema: definitions['RolesListResponse']; + }; + /** Bad request */ + 400: { + schema: definitions['ErrorResponse']; + }; + /** Unauthorized or invalid credentials. */ + 401: unknown; + /** Forbidden */ + 403: { + schema: definitions['ErrorResponse']; + }; + /** The specified group was not found. */ + 404: unknown; + /** The request syntax is correct, but the server couldn't process it due to semantic issues. */ + 422: { + schema: definitions['ErrorResponse']; + }; + /** An error has occurred while trying to fulfill the request. Most likely the ErrorResponse will contain more information about the error. */ + 500: { + schema: definitions['ErrorResponse']; + }; + }; + }; + /** Retrieves a list of all available group names for a specified group type (`oidc` or `db`). */ + getGroups: { + parameters: { + path: { + /** The type of group to retrieve. 
*/ + groupType: 'oidc'; + }; + }; + responses: { + /** A list of group names for the specified type. */ + 200: { + schema: string[]; + }; + /** Bad request */ + 400: { + schema: definitions['ErrorResponse']; + }; + /** Unauthorized or invalid credentials. */ + 401: unknown; + /** Forbidden */ + 403: { + schema: definitions['ErrorResponse']; + }; + /** The request syntax is correct, but the server couldn't process it due to semantic issues. */ + 422: { + schema: definitions['ErrorResponse']; + }; + /** An error has occurred while trying to fulfill the request. Most likely the ErrorResponse will contain more information about the error. */ + 500: { + schema: definitions['ErrorResponse']; + }; + }; + }; /** Lists all Objects in reverse order of creation, owned by the user that belongs to the used token. */ 'objects.list': { parameters: { diff --git a/src/proto/v1/base.ts b/src/proto/v1/base.ts index 2fcf3464..96bef64a 100644 --- a/src/proto/v1/base.ts +++ b/src/proto/v1/base.ts @@ -59,6 +59,7 @@ export function consistencyLevelToJSON(object: ConsistencyLevel): string { export interface NumberArrayProperties { /** * will be removed in the future, use vector_bytes + * go client 5.4.1 depends on this field. 
Only remove after go client is deprecated * * @deprecated */ @@ -158,6 +159,8 @@ export enum Filters_Operator { OPERATOR_IS_NULL = 11, OPERATOR_CONTAINS_ANY = 12, OPERATOR_CONTAINS_ALL = 13, + OPERATOR_CONTAINS_NONE = 14, + OPERATOR_NOT = 15, UNRECOGNIZED = -1, } @@ -205,6 +208,12 @@ export function filters_OperatorFromJSON(object: any): Filters_Operator { case 13: case "OPERATOR_CONTAINS_ALL": return Filters_Operator.OPERATOR_CONTAINS_ALL; + case 14: + case "OPERATOR_CONTAINS_NONE": + return Filters_Operator.OPERATOR_CONTAINS_NONE; + case 15: + case "OPERATOR_NOT": + return Filters_Operator.OPERATOR_NOT; case -1: case "UNRECOGNIZED": default: @@ -242,6 +251,10 @@ export function filters_OperatorToJSON(object: Filters_Operator): string { return "OPERATOR_CONTAINS_ANY"; case Filters_Operator.OPERATOR_CONTAINS_ALL: return "OPERATOR_CONTAINS_ALL"; + case Filters_Operator.OPERATOR_CONTAINS_NONE: + return "OPERATOR_CONTAINS_NONE"; + case Filters_Operator.OPERATOR_NOT: + return "OPERATOR_NOT"; case Filters_Operator.UNRECOGNIZED: default: return "UNRECOGNIZED"; diff --git a/src/proto/v1/base_search.ts b/src/proto/v1/base_search.ts index ef5795e2..e0dc0dcc 100644 --- a/src/proto/v1/base_search.ts +++ b/src/proto/v1/base_search.ts @@ -75,20 +75,9 @@ export interface WeightsForTarget { export interface Targets { targetVectors: string[]; combination: CombinationMethod; - /** - * deprecated in 1.26.2 - use weights_for_targets - * - * @deprecated - */ - weights: { [key: string]: number }; weightsForTargets: WeightsForTarget[]; } -export interface Targets_WeightsEntry { - key: string; - value: number; -} - export interface VectorForTarget { name: string; /** @@ -469,7 +458,7 @@ export const WeightsForTarget = { }; function createBaseTargets(): Targets { - return { targetVectors: [], combination: 0, weights: {}, weightsForTargets: [] }; + return { targetVectors: [], combination: 0, weightsForTargets: [] }; } export const Targets = { @@ -480,9 +469,6 @@ export const Targets = { if 
(message.combination !== 0) { writer.uint32(16).int32(message.combination); } - Object.entries(message.weights).forEach(([key, value]) => { - Targets_WeightsEntry.encode({ key: key as any, value }, writer.uint32(26).fork()).ldelim(); - }); for (const v of message.weightsForTargets) { WeightsForTarget.encode(v!, writer.uint32(34).fork()).ldelim(); } @@ -510,16 +496,6 @@ export const Targets = { message.combination = reader.int32() as any; continue; - case 3: - if (tag !== 26) { - break; - } - - const entry3 = Targets_WeightsEntry.decode(reader, reader.uint32()); - if (entry3.value !== undefined) { - message.weights[entry3.key] = entry3.value; - } - continue; case 4: if (tag !== 34) { break; @@ -542,12 +518,6 @@ export const Targets = { ? object.targetVectors.map((e: any) => globalThis.String(e)) : [], combination: isSet(object.combination) ? combinationMethodFromJSON(object.combination) : 0, - weights: isObject(object.weights) - ? Object.entries(object.weights).reduce<{ [key: string]: number }>((acc, [key, value]) => { - acc[key] = Number(value); - return acc; - }, {}) - : {}, weightsForTargets: globalThis.Array.isArray(object?.weightsForTargets) ? object.weightsForTargets.map((e: any) => WeightsForTarget.fromJSON(e)) : [], @@ -562,15 +532,6 @@ export const Targets = { if (message.combination !== 0) { obj.combination = combinationMethodToJSON(message.combination); } - if (message.weights) { - const entries = Object.entries(message.weights); - if (entries.length > 0) { - obj.weights = {}; - entries.forEach(([k, v]) => { - obj.weights[k] = v; - }); - } - } if (message.weightsForTargets?.length) { obj.weightsForTargets = message.weightsForTargets.map((e) => WeightsForTarget.toJSON(e)); } @@ -584,91 +545,11 @@ export const Targets = { const message = createBaseTargets(); message.targetVectors = object.targetVectors?.map((e) => e) || []; message.combination = object.combination ?? 0; - message.weights = Object.entries(object.weights ?? 
{}).reduce<{ [key: string]: number }>((acc, [key, value]) => { - if (value !== undefined) { - acc[key] = globalThis.Number(value); - } - return acc; - }, {}); message.weightsForTargets = object.weightsForTargets?.map((e) => WeightsForTarget.fromPartial(e)) || []; return message; }, }; -function createBaseTargets_WeightsEntry(): Targets_WeightsEntry { - return { key: "", value: 0 }; -} - -export const Targets_WeightsEntry = { - encode(message: Targets_WeightsEntry, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.key !== "") { - writer.uint32(10).string(message.key); - } - if (message.value !== 0) { - writer.uint32(21).float(message.value); - } - return writer; - }, - - decode(input: _m0.Reader | Uint8Array, length?: number): Targets_WeightsEntry { - const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); - let end = length === undefined ? reader.len : reader.pos + length; - const message = createBaseTargets_WeightsEntry(); - while (reader.pos < end) { - const tag = reader.uint32(); - switch (tag >>> 3) { - case 1: - if (tag !== 10) { - break; - } - - message.key = reader.string(); - continue; - case 2: - if (tag !== 21) { - break; - } - - message.value = reader.float(); - continue; - } - if ((tag & 7) === 4 || tag === 0) { - break; - } - reader.skipType(tag & 7); - } - return message; - }, - - fromJSON(object: any): Targets_WeightsEntry { - return { - key: isSet(object.key) ? globalThis.String(object.key) : "", - value: isSet(object.value) ? globalThis.Number(object.value) : 0, - }; - }, - - toJSON(message: Targets_WeightsEntry): unknown { - const obj: any = {}; - if (message.key !== "") { - obj.key = message.key; - } - if (message.value !== 0) { - obj.value = message.value; - } - return obj; - }, - - create(base?: DeepPartial): Targets_WeightsEntry { - return Targets_WeightsEntry.fromPartial(base ?? 
{}); - }, - fromPartial(object: DeepPartial): Targets_WeightsEntry { - const message = createBaseTargets_WeightsEntry(); - message.key = object.key ?? ""; - message.value = object.value ?? 0; - return message; - }, -}; - function createBaseVectorForTarget(): VectorForTarget { return { name: "", vectorBytes: new Uint8Array(0), vectors: [] }; } diff --git a/src/proto/v1/file_replication.ts b/src/proto/v1/file_replication.ts new file mode 100644 index 00000000..a627d4fc --- /dev/null +++ b/src/proto/v1/file_replication.ts @@ -0,0 +1,1140 @@ +// Code generated by protoc-gen-ts_proto. DO NOT EDIT. +// versions: +// protoc-gen-ts_proto v1.176.0 +// protoc v3.19.1 +// source: v1/file_replication.proto + +/* eslint-disable */ +import Long from "long"; +import { type CallContext, type CallOptions } from "nice-grpc-common"; +import _m0 from "protobufjs/minimal.js"; + +export const protobufPackage = "weaviate.v1"; + +export enum CompressionType { + /** COMPRESSION_TYPE_UNSPECIFIED - No compression */ + COMPRESSION_TYPE_UNSPECIFIED = 0, + /** COMPRESSION_TYPE_GZIP - gzip (compress/gzip) */ + COMPRESSION_TYPE_GZIP = 1, + /** COMPRESSION_TYPE_ZLIB - zlib (compress/zlib) */ + COMPRESSION_TYPE_ZLIB = 2, + /** COMPRESSION_TYPE_DEFLATE - raw DEFLATE (compress/flate) */ + COMPRESSION_TYPE_DEFLATE = 3, + UNRECOGNIZED = -1, +} + +export function compressionTypeFromJSON(object: any): CompressionType { + switch (object) { + case 0: + case "COMPRESSION_TYPE_UNSPECIFIED": + return CompressionType.COMPRESSION_TYPE_UNSPECIFIED; + case 1: + case "COMPRESSION_TYPE_GZIP": + return CompressionType.COMPRESSION_TYPE_GZIP; + case 2: + case "COMPRESSION_TYPE_ZLIB": + return CompressionType.COMPRESSION_TYPE_ZLIB; + case 3: + case "COMPRESSION_TYPE_DEFLATE": + return CompressionType.COMPRESSION_TYPE_DEFLATE; + case -1: + case "UNRECOGNIZED": + default: + return CompressionType.UNRECOGNIZED; + } +} + +export function compressionTypeToJSON(object: CompressionType): string { + switch (object) { + case 
CompressionType.COMPRESSION_TYPE_UNSPECIFIED: + return "COMPRESSION_TYPE_UNSPECIFIED"; + case CompressionType.COMPRESSION_TYPE_GZIP: + return "COMPRESSION_TYPE_GZIP"; + case CompressionType.COMPRESSION_TYPE_ZLIB: + return "COMPRESSION_TYPE_ZLIB"; + case CompressionType.COMPRESSION_TYPE_DEFLATE: + return "COMPRESSION_TYPE_DEFLATE"; + case CompressionType.UNRECOGNIZED: + default: + return "UNRECOGNIZED"; + } +} + +export interface PauseFileActivityRequest { + indexName: string; + shardName: string; + schemaVersion: number; +} + +export interface PauseFileActivityResponse { + indexName: string; + shardName: string; +} + +export interface ResumeFileActivityRequest { + indexName: string; + shardName: string; +} + +export interface ResumeFileActivityResponse { + indexName: string; + shardName: string; +} + +export interface ListFilesRequest { + indexName: string; + shardName: string; +} + +export interface ListFilesResponse { + indexName: string; + shardName: string; + fileNames: string[]; +} + +export interface GetFileMetadataRequest { + indexName: string; + shardName: string; + fileName: string; +} + +export interface FileMetadata { + indexName: string; + shardName: string; + fileName: string; + size: number; + crc32: number; +} + +export interface GetFileRequest { + indexName: string; + shardName: string; + fileName: string; + /** Requested compression algorithm for streamed chunks */ + compression: CompressionType; +} + +export interface FileChunk { + /** Byte offset in the uncompressed file */ + offset: number; + /** Compressed or raw chunk data */ + data: Uint8Array; + /** Indicates final chunk */ + eof: boolean; +} + +function createBasePauseFileActivityRequest(): PauseFileActivityRequest { + return { indexName: "", shardName: "", schemaVersion: 0 }; +} + +export const PauseFileActivityRequest = { + encode(message: PauseFileActivityRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.indexName !== "") { + 
writer.uint32(10).string(message.indexName); + } + if (message.shardName !== "") { + writer.uint32(18).string(message.shardName); + } + if (message.schemaVersion !== 0) { + writer.uint32(24).uint64(message.schemaVersion); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PauseFileActivityRequest { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePauseFileActivityRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.indexName = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.shardName = reader.string(); + continue; + case 3: + if (tag !== 24) { + break; + } + + message.schemaVersion = longToNumber(reader.uint64() as Long); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): PauseFileActivityRequest { + return { + indexName: isSet(object.indexName) ? globalThis.String(object.indexName) : "", + shardName: isSet(object.shardName) ? globalThis.String(object.shardName) : "", + schemaVersion: isSet(object.schemaVersion) ? globalThis.Number(object.schemaVersion) : 0, + }; + }, + + toJSON(message: PauseFileActivityRequest): unknown { + const obj: any = {}; + if (message.indexName !== "") { + obj.indexName = message.indexName; + } + if (message.shardName !== "") { + obj.shardName = message.shardName; + } + if (message.schemaVersion !== 0) { + obj.schemaVersion = Math.round(message.schemaVersion); + } + return obj; + }, + + create(base?: DeepPartial): PauseFileActivityRequest { + return PauseFileActivityRequest.fromPartial(base ?? 
{}); + }, + fromPartial(object: DeepPartial): PauseFileActivityRequest { + const message = createBasePauseFileActivityRequest(); + message.indexName = object.indexName ?? ""; + message.shardName = object.shardName ?? ""; + message.schemaVersion = object.schemaVersion ?? 0; + return message; + }, +}; + +function createBasePauseFileActivityResponse(): PauseFileActivityResponse { + return { indexName: "", shardName: "" }; +} + +export const PauseFileActivityResponse = { + encode(message: PauseFileActivityResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.indexName !== "") { + writer.uint32(10).string(message.indexName); + } + if (message.shardName !== "") { + writer.uint32(18).string(message.shardName); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): PauseFileActivityResponse { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBasePauseFileActivityResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.indexName = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.shardName = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): PauseFileActivityResponse { + return { + indexName: isSet(object.indexName) ? globalThis.String(object.indexName) : "", + shardName: isSet(object.shardName) ? 
globalThis.String(object.shardName) : "", + }; + }, + + toJSON(message: PauseFileActivityResponse): unknown { + const obj: any = {}; + if (message.indexName !== "") { + obj.indexName = message.indexName; + } + if (message.shardName !== "") { + obj.shardName = message.shardName; + } + return obj; + }, + + create(base?: DeepPartial): PauseFileActivityResponse { + return PauseFileActivityResponse.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): PauseFileActivityResponse { + const message = createBasePauseFileActivityResponse(); + message.indexName = object.indexName ?? ""; + message.shardName = object.shardName ?? ""; + return message; + }, +}; + +function createBaseResumeFileActivityRequest(): ResumeFileActivityRequest { + return { indexName: "", shardName: "" }; +} + +export const ResumeFileActivityRequest = { + encode(message: ResumeFileActivityRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.indexName !== "") { + writer.uint32(10).string(message.indexName); + } + if (message.shardName !== "") { + writer.uint32(18).string(message.shardName); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResumeFileActivityRequest { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResumeFileActivityRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.indexName = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.shardName = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): ResumeFileActivityRequest { + return { + indexName: isSet(object.indexName) ? globalThis.String(object.indexName) : "", + shardName: isSet(object.shardName) ? 
globalThis.String(object.shardName) : "", + }; + }, + + toJSON(message: ResumeFileActivityRequest): unknown { + const obj: any = {}; + if (message.indexName !== "") { + obj.indexName = message.indexName; + } + if (message.shardName !== "") { + obj.shardName = message.shardName; + } + return obj; + }, + + create(base?: DeepPartial): ResumeFileActivityRequest { + return ResumeFileActivityRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): ResumeFileActivityRequest { + const message = createBaseResumeFileActivityRequest(); + message.indexName = object.indexName ?? ""; + message.shardName = object.shardName ?? ""; + return message; + }, +}; + +function createBaseResumeFileActivityResponse(): ResumeFileActivityResponse { + return { indexName: "", shardName: "" }; +} + +export const ResumeFileActivityResponse = { + encode(message: ResumeFileActivityResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.indexName !== "") { + writer.uint32(10).string(message.indexName); + } + if (message.shardName !== "") { + writer.uint32(18).string(message.shardName); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ResumeFileActivityResponse { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseResumeFileActivityResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.indexName = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.shardName = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): ResumeFileActivityResponse { + return { + indexName: isSet(object.indexName) ? 
globalThis.String(object.indexName) : "", + shardName: isSet(object.shardName) ? globalThis.String(object.shardName) : "", + }; + }, + + toJSON(message: ResumeFileActivityResponse): unknown { + const obj: any = {}; + if (message.indexName !== "") { + obj.indexName = message.indexName; + } + if (message.shardName !== "") { + obj.shardName = message.shardName; + } + return obj; + }, + + create(base?: DeepPartial): ResumeFileActivityResponse { + return ResumeFileActivityResponse.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): ResumeFileActivityResponse { + const message = createBaseResumeFileActivityResponse(); + message.indexName = object.indexName ?? ""; + message.shardName = object.shardName ?? ""; + return message; + }, +}; + +function createBaseListFilesRequest(): ListFilesRequest { + return { indexName: "", shardName: "" }; +} + +export const ListFilesRequest = { + encode(message: ListFilesRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.indexName !== "") { + writer.uint32(10).string(message.indexName); + } + if (message.shardName !== "") { + writer.uint32(18).string(message.shardName); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ListFilesRequest { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseListFilesRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.indexName = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.shardName = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): ListFilesRequest { + return { + indexName: isSet(object.indexName) ? 
globalThis.String(object.indexName) : "", + shardName: isSet(object.shardName) ? globalThis.String(object.shardName) : "", + }; + }, + + toJSON(message: ListFilesRequest): unknown { + const obj: any = {}; + if (message.indexName !== "") { + obj.indexName = message.indexName; + } + if (message.shardName !== "") { + obj.shardName = message.shardName; + } + return obj; + }, + + create(base?: DeepPartial): ListFilesRequest { + return ListFilesRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): ListFilesRequest { + const message = createBaseListFilesRequest(); + message.indexName = object.indexName ?? ""; + message.shardName = object.shardName ?? ""; + return message; + }, +}; + +function createBaseListFilesResponse(): ListFilesResponse { + return { indexName: "", shardName: "", fileNames: [] }; +} + +export const ListFilesResponse = { + encode(message: ListFilesResponse, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.indexName !== "") { + writer.uint32(10).string(message.indexName); + } + if (message.shardName !== "") { + writer.uint32(18).string(message.shardName); + } + for (const v of message.fileNames) { + writer.uint32(26).string(v!); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): ListFilesResponse { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseListFilesResponse(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.indexName = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.shardName = reader.string(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.fileNames.push(reader.string()); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): ListFilesResponse { + return { + indexName: isSet(object.indexName) ? globalThis.String(object.indexName) : "", + shardName: isSet(object.shardName) ? globalThis.String(object.shardName) : "", + fileNames: globalThis.Array.isArray(object?.fileNames) + ? object.fileNames.map((e: any) => globalThis.String(e)) + : [], + }; + }, + + toJSON(message: ListFilesResponse): unknown { + const obj: any = {}; + if (message.indexName !== "") { + obj.indexName = message.indexName; + } + if (message.shardName !== "") { + obj.shardName = message.shardName; + } + if (message.fileNames?.length) { + obj.fileNames = message.fileNames; + } + return obj; + }, + + create(base?: DeepPartial): ListFilesResponse { + return ListFilesResponse.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): ListFilesResponse { + const message = createBaseListFilesResponse(); + message.indexName = object.indexName ?? ""; + message.shardName = object.shardName ?? 
""; + message.fileNames = object.fileNames?.map((e) => e) || []; + return message; + }, +}; + +function createBaseGetFileMetadataRequest(): GetFileMetadataRequest { + return { indexName: "", shardName: "", fileName: "" }; +} + +export const GetFileMetadataRequest = { + encode(message: GetFileMetadataRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.indexName !== "") { + writer.uint32(10).string(message.indexName); + } + if (message.shardName !== "") { + writer.uint32(18).string(message.shardName); + } + if (message.fileName !== "") { + writer.uint32(26).string(message.fileName); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetFileMetadataRequest { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetFileMetadataRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.indexName = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.shardName = reader.string(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.fileName = reader.string(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): GetFileMetadataRequest { + return { + indexName: isSet(object.indexName) ? globalThis.String(object.indexName) : "", + shardName: isSet(object.shardName) ? globalThis.String(object.shardName) : "", + fileName: isSet(object.fileName) ? 
globalThis.String(object.fileName) : "", + }; + }, + + toJSON(message: GetFileMetadataRequest): unknown { + const obj: any = {}; + if (message.indexName !== "") { + obj.indexName = message.indexName; + } + if (message.shardName !== "") { + obj.shardName = message.shardName; + } + if (message.fileName !== "") { + obj.fileName = message.fileName; + } + return obj; + }, + + create(base?: DeepPartial): GetFileMetadataRequest { + return GetFileMetadataRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): GetFileMetadataRequest { + const message = createBaseGetFileMetadataRequest(); + message.indexName = object.indexName ?? ""; + message.shardName = object.shardName ?? ""; + message.fileName = object.fileName ?? ""; + return message; + }, +}; + +function createBaseFileMetadata(): FileMetadata { + return { indexName: "", shardName: "", fileName: "", size: 0, crc32: 0 }; +} + +export const FileMetadata = { + encode(message: FileMetadata, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.indexName !== "") { + writer.uint32(10).string(message.indexName); + } + if (message.shardName !== "") { + writer.uint32(18).string(message.shardName); + } + if (message.fileName !== "") { + writer.uint32(26).string(message.fileName); + } + if (message.size !== 0) { + writer.uint32(32).int64(message.size); + } + if (message.crc32 !== 0) { + writer.uint32(40).uint32(message.crc32); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileMetadata { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileMetadata(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.indexName = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.shardName = reader.string(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.fileName = reader.string(); + continue; + case 4: + if (tag !== 32) { + break; + } + + message.size = longToNumber(reader.int64() as Long); + continue; + case 5: + if (tag !== 40) { + break; + } + + message.crc32 = reader.uint32(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): FileMetadata { + return { + indexName: isSet(object.indexName) ? globalThis.String(object.indexName) : "", + shardName: isSet(object.shardName) ? globalThis.String(object.shardName) : "", + fileName: isSet(object.fileName) ? globalThis.String(object.fileName) : "", + size: isSet(object.size) ? globalThis.Number(object.size) : 0, + crc32: isSet(object.crc32) ? globalThis.Number(object.crc32) : 0, + }; + }, + + toJSON(message: FileMetadata): unknown { + const obj: any = {}; + if (message.indexName !== "") { + obj.indexName = message.indexName; + } + if (message.shardName !== "") { + obj.shardName = message.shardName; + } + if (message.fileName !== "") { + obj.fileName = message.fileName; + } + if (message.size !== 0) { + obj.size = Math.round(message.size); + } + if (message.crc32 !== 0) { + obj.crc32 = Math.round(message.crc32); + } + return obj; + }, + + create(base?: DeepPartial): FileMetadata { + return FileMetadata.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): FileMetadata { + const message = createBaseFileMetadata(); + message.indexName = object.indexName ?? ""; + message.shardName = object.shardName ?? ""; + message.fileName = object.fileName ?? 
""; + message.size = object.size ?? 0; + message.crc32 = object.crc32 ?? 0; + return message; + }, +}; + +function createBaseGetFileRequest(): GetFileRequest { + return { indexName: "", shardName: "", fileName: "", compression: 0 }; +} + +export const GetFileRequest = { + encode(message: GetFileRequest, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.indexName !== "") { + writer.uint32(10).string(message.indexName); + } + if (message.shardName !== "") { + writer.uint32(18).string(message.shardName); + } + if (message.fileName !== "") { + writer.uint32(26).string(message.fileName); + } + if (message.compression !== 0) { + writer.uint32(32).int32(message.compression); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): GetFileRequest { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? reader.len : reader.pos + length; + const message = createBaseGetFileRequest(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 10) { + break; + } + + message.indexName = reader.string(); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.shardName = reader.string(); + continue; + case 3: + if (tag !== 26) { + break; + } + + message.fileName = reader.string(); + continue; + case 4: + if (tag !== 32) { + break; + } + + message.compression = reader.int32() as any; + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): GetFileRequest { + return { + indexName: isSet(object.indexName) ? globalThis.String(object.indexName) : "", + shardName: isSet(object.shardName) ? globalThis.String(object.shardName) : "", + fileName: isSet(object.fileName) ? globalThis.String(object.fileName) : "", + compression: isSet(object.compression) ? 
compressionTypeFromJSON(object.compression) : 0, + }; + }, + + toJSON(message: GetFileRequest): unknown { + const obj: any = {}; + if (message.indexName !== "") { + obj.indexName = message.indexName; + } + if (message.shardName !== "") { + obj.shardName = message.shardName; + } + if (message.fileName !== "") { + obj.fileName = message.fileName; + } + if (message.compression !== 0) { + obj.compression = compressionTypeToJSON(message.compression); + } + return obj; + }, + + create(base?: DeepPartial): GetFileRequest { + return GetFileRequest.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): GetFileRequest { + const message = createBaseGetFileRequest(); + message.indexName = object.indexName ?? ""; + message.shardName = object.shardName ?? ""; + message.fileName = object.fileName ?? ""; + message.compression = object.compression ?? 0; + return message; + }, +}; + +function createBaseFileChunk(): FileChunk { + return { offset: 0, data: new Uint8Array(0), eof: false }; +} + +export const FileChunk = { + encode(message: FileChunk, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { + if (message.offset !== 0) { + writer.uint32(8).int64(message.offset); + } + if (message.data.length !== 0) { + writer.uint32(18).bytes(message.data); + } + if (message.eof !== false) { + writer.uint32(24).bool(message.eof); + } + return writer; + }, + + decode(input: _m0.Reader | Uint8Array, length?: number): FileChunk { + const reader = input instanceof _m0.Reader ? input : _m0.Reader.create(input); + let end = length === undefined ? 
reader.len : reader.pos + length; + const message = createBaseFileChunk(); + while (reader.pos < end) { + const tag = reader.uint32(); + switch (tag >>> 3) { + case 1: + if (tag !== 8) { + break; + } + + message.offset = longToNumber(reader.int64() as Long); + continue; + case 2: + if (tag !== 18) { + break; + } + + message.data = reader.bytes(); + continue; + case 3: + if (tag !== 24) { + break; + } + + message.eof = reader.bool(); + continue; + } + if ((tag & 7) === 4 || tag === 0) { + break; + } + reader.skipType(tag & 7); + } + return message; + }, + + fromJSON(object: any): FileChunk { + return { + offset: isSet(object.offset) ? globalThis.Number(object.offset) : 0, + data: isSet(object.data) ? bytesFromBase64(object.data) : new Uint8Array(0), + eof: isSet(object.eof) ? globalThis.Boolean(object.eof) : false, + }; + }, + + toJSON(message: FileChunk): unknown { + const obj: any = {}; + if (message.offset !== 0) { + obj.offset = Math.round(message.offset); + } + if (message.data.length !== 0) { + obj.data = base64FromBytes(message.data); + } + if (message.eof !== false) { + obj.eof = message.eof; + } + return obj; + }, + + create(base?: DeepPartial): FileChunk { + return FileChunk.fromPartial(base ?? {}); + }, + fromPartial(object: DeepPartial): FileChunk { + const message = createBaseFileChunk(); + message.offset = object.offset ?? 0; + message.data = object.data ?? new Uint8Array(0); + message.eof = object.eof ?? 
false; + return message; + }, +}; + +export type FileReplicationServiceDefinition = typeof FileReplicationServiceDefinition; +export const FileReplicationServiceDefinition = { + name: "FileReplicationService", + fullName: "weaviate.v1.FileReplicationService", + methods: { + pauseFileActivity: { + name: "PauseFileActivity", + requestType: PauseFileActivityRequest, + requestStream: false, + responseType: PauseFileActivityResponse, + responseStream: false, + options: {}, + }, + resumeFileActivity: { + name: "ResumeFileActivity", + requestType: ResumeFileActivityRequest, + requestStream: false, + responseType: ResumeFileActivityResponse, + responseStream: false, + options: {}, + }, + listFiles: { + name: "ListFiles", + requestType: ListFilesRequest, + requestStream: false, + responseType: ListFilesResponse, + responseStream: false, + options: {}, + }, + getFileMetadata: { + name: "GetFileMetadata", + requestType: GetFileMetadataRequest, + requestStream: true, + responseType: FileMetadata, + responseStream: true, + options: {}, + }, + getFile: { + name: "GetFile", + requestType: GetFileRequest, + requestStream: true, + responseType: FileChunk, + responseStream: true, + options: {}, + }, + }, +} as const; + +export interface FileReplicationServiceImplementation { + pauseFileActivity( + request: PauseFileActivityRequest, + context: CallContext & CallContextExt, + ): Promise>; + resumeFileActivity( + request: ResumeFileActivityRequest, + context: CallContext & CallContextExt, + ): Promise>; + listFiles(request: ListFilesRequest, context: CallContext & CallContextExt): Promise>; + getFileMetadata( + request: AsyncIterable, + context: CallContext & CallContextExt, + ): ServerStreamingMethodResult>; + getFile( + request: AsyncIterable, + context: CallContext & CallContextExt, + ): ServerStreamingMethodResult>; +} + +export interface FileReplicationServiceClient { + pauseFileActivity( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): Promise; + 
resumeFileActivity( + request: DeepPartial, + options?: CallOptions & CallOptionsExt, + ): Promise; + listFiles(request: DeepPartial, options?: CallOptions & CallOptionsExt): Promise; + getFileMetadata( + request: AsyncIterable>, + options?: CallOptions & CallOptionsExt, + ): AsyncIterable; + getFile( + request: AsyncIterable>, + options?: CallOptions & CallOptionsExt, + ): AsyncIterable; +} + +function bytesFromBase64(b64: string): Uint8Array { + if ((globalThis as any).Buffer) { + return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); + } else { + const bin = globalThis.atob(b64); + const arr = new Uint8Array(bin.length); + for (let i = 0; i < bin.length; ++i) { + arr[i] = bin.charCodeAt(i); + } + return arr; + } +} + +function base64FromBytes(arr: Uint8Array): string { + if ((globalThis as any).Buffer) { + return globalThis.Buffer.from(arr).toString("base64"); + } else { + const bin: string[] = []; + arr.forEach((byte) => { + bin.push(globalThis.String.fromCharCode(byte)); + }); + return globalThis.btoa(bin.join("")); + } +} + +type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined; + +export type DeepPartial = T extends Builtin ? T + : T extends globalThis.Array ? globalThis.Array> + : T extends ReadonlyArray ? ReadonlyArray> + : T extends {} ? 
{ [K in keyof T]?: DeepPartial } + : Partial; + +function longToNumber(long: Long): number { + if (long.gt(globalThis.Number.MAX_SAFE_INTEGER)) { + throw new globalThis.Error("Value is larger than Number.MAX_SAFE_INTEGER"); + } + return long.toNumber(); +} + +if (_m0.util.Long !== Long) { + _m0.util.Long = Long as any; + _m0.configure(); +} + +function isSet(value: any): boolean { + return value !== null && value !== undefined; +} + +export type ServerStreamingMethodResult = { [Symbol.asyncIterator](): AsyncIterator }; diff --git a/src/proto/v1/generative.ts b/src/proto/v1/generative.ts index 25e750c9..7cf1e35c 100644 --- a/src/proto/v1/generative.ts +++ b/src/proto/v1/generative.ts @@ -36,6 +36,7 @@ export interface GenerativeSearch_Grouped { | undefined; /** only allow one at the beginning, but multiple in the future */ queries: GenerativeProvider[]; + debug: boolean; } export interface GenerativeProvider { @@ -576,7 +577,7 @@ export const GenerativeSearch_Single = { }; function createBaseGenerativeSearch_Grouped(): GenerativeSearch_Grouped { - return { task: "", properties: undefined, queries: [] }; + return { task: "", properties: undefined, queries: [], debug: false }; } export const GenerativeSearch_Grouped = { @@ -590,6 +591,9 @@ export const GenerativeSearch_Grouped = { for (const v of message.queries) { GenerativeProvider.encode(v!, writer.uint32(26).fork()).ldelim(); } + if (message.debug !== false) { + writer.uint32(32).bool(message.debug); + } return writer; }, @@ -621,6 +625,13 @@ export const GenerativeSearch_Grouped = { message.queries.push(GenerativeProvider.decode(reader, reader.uint32())); continue; + case 4: + if (tag !== 32) { + break; + } + + message.debug = reader.bool(); + continue; } if ((tag & 7) === 4 || tag === 0) { break; @@ -637,6 +648,7 @@ export const GenerativeSearch_Grouped = { queries: globalThis.Array.isArray(object?.queries) ? object.queries.map((e: any) => GenerativeProvider.fromJSON(e)) : [], + debug: isSet(object.debug) ? 
globalThis.Boolean(object.debug) : false, }; }, @@ -651,6 +663,9 @@ export const GenerativeSearch_Grouped = { if (message.queries?.length) { obj.queries = message.queries.map((e) => GenerativeProvider.toJSON(e)); } + if (message.debug !== false) { + obj.debug = message.debug; + } return obj; }, @@ -664,6 +679,7 @@ export const GenerativeSearch_Grouped = { ? TextArray.fromPartial(object.properties) : undefined; message.queries = object.queries?.map((e) => GenerativeProvider.fromPartial(e)) || []; + message.debug = object.debug ?? false; return message; }, }; diff --git a/src/proto/v1/properties.ts b/src/proto/v1/properties.ts index 5b7dc122..2d04b558 100644 --- a/src/proto/v1/properties.ts +++ b/src/proto/v1/properties.ts @@ -24,8 +24,7 @@ export interface Value { numberValue?: | number | undefined; - /** @deprecated */ - stringValue?: string | undefined; + /** dont reuse 2, old field that has been removed; Was "string string_value = 2;" */ boolValue?: boolean | undefined; objectValue?: Properties | undefined; listValue?: ListValue | undefined; @@ -40,8 +39,6 @@ export interface Value { } export interface ListValue { - /** @deprecated */ - values: Value[]; numberValues?: NumberValues | undefined; boolValues?: BoolValues | undefined; objectValues?: ObjectValues | undefined; @@ -257,7 +254,6 @@ export const Properties_FieldsEntry = { function createBaseValue(): Value { return { numberValue: undefined, - stringValue: undefined, boolValue: undefined, objectValue: undefined, listValue: undefined, @@ -277,9 +273,6 @@ export const Value = { if (message.numberValue !== undefined) { writer.uint32(9).double(message.numberValue); } - if (message.stringValue !== undefined) { - writer.uint32(18).string(message.stringValue); - } if (message.boolValue !== undefined) { writer.uint32(24).bool(message.boolValue); } @@ -330,13 +323,6 @@ export const Value = { message.numberValue = reader.double(); continue; - case 2: - if (tag !== 18) { - break; - } - - message.stringValue = 
reader.string(); - continue; case 3: if (tag !== 24) { break; @@ -426,7 +412,6 @@ export const Value = { fromJSON(object: any): Value { return { numberValue: isSet(object.numberValue) ? globalThis.Number(object.numberValue) : undefined, - stringValue: isSet(object.stringValue) ? globalThis.String(object.stringValue) : undefined, boolValue: isSet(object.boolValue) ? globalThis.Boolean(object.boolValue) : undefined, objectValue: isSet(object.objectValue) ? Properties.fromJSON(object.objectValue) : undefined, listValue: isSet(object.listValue) ? ListValue.fromJSON(object.listValue) : undefined, @@ -446,9 +431,6 @@ export const Value = { if (message.numberValue !== undefined) { obj.numberValue = message.numberValue; } - if (message.stringValue !== undefined) { - obj.stringValue = message.stringValue; - } if (message.boolValue !== undefined) { obj.boolValue = message.boolValue; } @@ -491,7 +473,6 @@ export const Value = { fromPartial(object: DeepPartial): Value { const message = createBaseValue(); message.numberValue = object.numberValue ?? undefined; - message.stringValue = object.stringValue ?? undefined; message.boolValue = object.boolValue ?? undefined; message.objectValue = (object.objectValue !== undefined && object.objectValue !== null) ? 
Properties.fromPartial(object.objectValue) @@ -517,7 +498,6 @@ export const Value = { function createBaseListValue(): ListValue { return { - values: [], numberValues: undefined, boolValues: undefined, objectValues: undefined, @@ -530,9 +510,6 @@ function createBaseListValue(): ListValue { export const ListValue = { encode(message: ListValue, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - for (const v of message.values) { - Value.encode(v!, writer.uint32(10).fork()).ldelim(); - } if (message.numberValues !== undefined) { NumberValues.encode(message.numberValues, writer.uint32(18).fork()).ldelim(); } @@ -564,13 +541,6 @@ export const ListValue = { while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: - if (tag !== 10) { - break; - } - - message.values.push(Value.decode(reader, reader.uint32())); - continue; case 2: if (tag !== 18) { break; @@ -631,7 +601,6 @@ export const ListValue = { fromJSON(object: any): ListValue { return { - values: globalThis.Array.isArray(object?.values) ? object.values.map((e: any) => Value.fromJSON(e)) : [], numberValues: isSet(object.numberValues) ? NumberValues.fromJSON(object.numberValues) : undefined, boolValues: isSet(object.boolValues) ? BoolValues.fromJSON(object.boolValues) : undefined, objectValues: isSet(object.objectValues) ? ObjectValues.fromJSON(object.objectValues) : undefined, @@ -644,9 +613,6 @@ export const ListValue = { toJSON(message: ListValue): unknown { const obj: any = {}; - if (message.values?.length) { - obj.values = message.values.map((e) => Value.toJSON(e)); - } if (message.numberValues !== undefined) { obj.numberValues = NumberValues.toJSON(message.numberValues); } @@ -676,7 +642,6 @@ export const ListValue = { }, fromPartial(object: DeepPartial): ListValue { const message = createBaseListValue(); - message.values = object.values?.map((e) => Value.fromPartial(e)) || []; message.numberValues = (object.numberValues !== undefined && object.numberValues !== null) ? 
NumberValues.fromPartial(object.numberValues) : undefined; diff --git a/src/proto/v1/search_get.ts b/src/proto/v1/search_get.ts index a3726d8e..0664ca37 100644 --- a/src/proto/v1/search_get.ts +++ b/src/proto/v1/search_get.ts @@ -7,20 +7,7 @@ /* eslint-disable */ import Long from "long"; import _m0 from "protobufjs/minimal.js"; -import { Struct } from "../google/protobuf/struct.js"; -import { - BooleanArrayProperties, - ConsistencyLevel, - consistencyLevelFromJSON, - consistencyLevelToJSON, - Filters, - IntArrayProperties, - NumberArrayProperties, - ObjectArrayProperties, - ObjectProperties, - TextArrayProperties, - Vectors, -} from "./base.js"; +import { ConsistencyLevel, consistencyLevelFromJSON, consistencyLevelToJSON, Filters, Vectors } from "./base.js"; import { BM25, Hybrid, @@ -213,25 +200,9 @@ export interface MetadataResult { } export interface PropertiesResult { - /** @deprecated */ - nonRefProperties: { [key: string]: any } | undefined; refProps: RefPropertiesResult[]; targetCollection: string; - metadata: - | MetadataResult - | undefined; - /** @deprecated */ - numberArrayProperties: NumberArrayProperties[]; - /** @deprecated */ - intArrayProperties: IntArrayProperties[]; - /** @deprecated */ - textArrayProperties: TextArrayProperties[]; - /** @deprecated */ - booleanArrayProperties: BooleanArrayProperties[]; - /** @deprecated */ - objectProperties: ObjectProperties[]; - /** @deprecated */ - objectArrayProperties: ObjectArrayProperties[]; + metadata: MetadataResult | undefined; nonRefProps: Properties | undefined; refPropsRequested: boolean; } @@ -2422,27 +2393,11 @@ export const MetadataResult = { }; function createBasePropertiesResult(): PropertiesResult { - return { - nonRefProperties: undefined, - refProps: [], - targetCollection: "", - metadata: undefined, - numberArrayProperties: [], - intArrayProperties: [], - textArrayProperties: [], - booleanArrayProperties: [], - objectProperties: [], - objectArrayProperties: [], - nonRefProps: undefined, - 
refPropsRequested: false, - }; + return { refProps: [], targetCollection: "", metadata: undefined, nonRefProps: undefined, refPropsRequested: false }; } export const PropertiesResult = { encode(message: PropertiesResult, writer: _m0.Writer = _m0.Writer.create()): _m0.Writer { - if (message.nonRefProperties !== undefined) { - Struct.encode(Struct.wrap(message.nonRefProperties), writer.uint32(10).fork()).ldelim(); - } for (const v of message.refProps) { RefPropertiesResult.encode(v!, writer.uint32(18).fork()).ldelim(); } @@ -2452,24 +2407,6 @@ export const PropertiesResult = { if (message.metadata !== undefined) { MetadataResult.encode(message.metadata, writer.uint32(34).fork()).ldelim(); } - for (const v of message.numberArrayProperties) { - NumberArrayProperties.encode(v!, writer.uint32(42).fork()).ldelim(); - } - for (const v of message.intArrayProperties) { - IntArrayProperties.encode(v!, writer.uint32(50).fork()).ldelim(); - } - for (const v of message.textArrayProperties) { - TextArrayProperties.encode(v!, writer.uint32(58).fork()).ldelim(); - } - for (const v of message.booleanArrayProperties) { - BooleanArrayProperties.encode(v!, writer.uint32(66).fork()).ldelim(); - } - for (const v of message.objectProperties) { - ObjectProperties.encode(v!, writer.uint32(74).fork()).ldelim(); - } - for (const v of message.objectArrayProperties) { - ObjectArrayProperties.encode(v!, writer.uint32(82).fork()).ldelim(); - } if (message.nonRefProps !== undefined) { Properties.encode(message.nonRefProps, writer.uint32(90).fork()).ldelim(); } @@ -2486,13 +2423,6 @@ export const PropertiesResult = { while (reader.pos < end) { const tag = reader.uint32(); switch (tag >>> 3) { - case 1: - if (tag !== 10) { - break; - } - - message.nonRefProperties = Struct.unwrap(Struct.decode(reader, reader.uint32())); - continue; case 2: if (tag !== 18) { break; @@ -2514,48 +2444,6 @@ export const PropertiesResult = { message.metadata = MetadataResult.decode(reader, reader.uint32()); continue; - 
case 5: - if (tag !== 42) { - break; - } - - message.numberArrayProperties.push(NumberArrayProperties.decode(reader, reader.uint32())); - continue; - case 6: - if (tag !== 50) { - break; - } - - message.intArrayProperties.push(IntArrayProperties.decode(reader, reader.uint32())); - continue; - case 7: - if (tag !== 58) { - break; - } - - message.textArrayProperties.push(TextArrayProperties.decode(reader, reader.uint32())); - continue; - case 8: - if (tag !== 66) { - break; - } - - message.booleanArrayProperties.push(BooleanArrayProperties.decode(reader, reader.uint32())); - continue; - case 9: - if (tag !== 74) { - break; - } - - message.objectProperties.push(ObjectProperties.decode(reader, reader.uint32())); - continue; - case 10: - if (tag !== 82) { - break; - } - - message.objectArrayProperties.push(ObjectArrayProperties.decode(reader, reader.uint32())); - continue; case 11: if (tag !== 90) { break; @@ -2581,30 +2469,11 @@ export const PropertiesResult = { fromJSON(object: any): PropertiesResult { return { - nonRefProperties: isObject(object.nonRefProperties) ? object.nonRefProperties : undefined, refProps: globalThis.Array.isArray(object?.refProps) ? object.refProps.map((e: any) => RefPropertiesResult.fromJSON(e)) : [], targetCollection: isSet(object.targetCollection) ? globalThis.String(object.targetCollection) : "", metadata: isSet(object.metadata) ? MetadataResult.fromJSON(object.metadata) : undefined, - numberArrayProperties: globalThis.Array.isArray(object?.numberArrayProperties) - ? object.numberArrayProperties.map((e: any) => NumberArrayProperties.fromJSON(e)) - : [], - intArrayProperties: globalThis.Array.isArray(object?.intArrayProperties) - ? object.intArrayProperties.map((e: any) => IntArrayProperties.fromJSON(e)) - : [], - textArrayProperties: globalThis.Array.isArray(object?.textArrayProperties) - ? 
object.textArrayProperties.map((e: any) => TextArrayProperties.fromJSON(e)) - : [], - booleanArrayProperties: globalThis.Array.isArray(object?.booleanArrayProperties) - ? object.booleanArrayProperties.map((e: any) => BooleanArrayProperties.fromJSON(e)) - : [], - objectProperties: globalThis.Array.isArray(object?.objectProperties) - ? object.objectProperties.map((e: any) => ObjectProperties.fromJSON(e)) - : [], - objectArrayProperties: globalThis.Array.isArray(object?.objectArrayProperties) - ? object.objectArrayProperties.map((e: any) => ObjectArrayProperties.fromJSON(e)) - : [], nonRefProps: isSet(object.nonRefProps) ? Properties.fromJSON(object.nonRefProps) : undefined, refPropsRequested: isSet(object.refPropsRequested) ? globalThis.Boolean(object.refPropsRequested) : false, }; @@ -2612,9 +2481,6 @@ export const PropertiesResult = { toJSON(message: PropertiesResult): unknown { const obj: any = {}; - if (message.nonRefProperties !== undefined) { - obj.nonRefProperties = message.nonRefProperties; - } if (message.refProps?.length) { obj.refProps = message.refProps.map((e) => RefPropertiesResult.toJSON(e)); } @@ -2624,24 +2490,6 @@ export const PropertiesResult = { if (message.metadata !== undefined) { obj.metadata = MetadataResult.toJSON(message.metadata); } - if (message.numberArrayProperties?.length) { - obj.numberArrayProperties = message.numberArrayProperties.map((e) => NumberArrayProperties.toJSON(e)); - } - if (message.intArrayProperties?.length) { - obj.intArrayProperties = message.intArrayProperties.map((e) => IntArrayProperties.toJSON(e)); - } - if (message.textArrayProperties?.length) { - obj.textArrayProperties = message.textArrayProperties.map((e) => TextArrayProperties.toJSON(e)); - } - if (message.booleanArrayProperties?.length) { - obj.booleanArrayProperties = message.booleanArrayProperties.map((e) => BooleanArrayProperties.toJSON(e)); - } - if (message.objectProperties?.length) { - obj.objectProperties = message.objectProperties.map((e) => 
ObjectProperties.toJSON(e)); - } - if (message.objectArrayProperties?.length) { - obj.objectArrayProperties = message.objectArrayProperties.map((e) => ObjectArrayProperties.toJSON(e)); - } if (message.nonRefProps !== undefined) { obj.nonRefProps = Properties.toJSON(message.nonRefProps); } @@ -2656,21 +2504,11 @@ export const PropertiesResult = { }, fromPartial(object: DeepPartial): PropertiesResult { const message = createBasePropertiesResult(); - message.nonRefProperties = object.nonRefProperties ?? undefined; message.refProps = object.refProps?.map((e) => RefPropertiesResult.fromPartial(e)) || []; message.targetCollection = object.targetCollection ?? ""; message.metadata = (object.metadata !== undefined && object.metadata !== null) ? MetadataResult.fromPartial(object.metadata) : undefined; - message.numberArrayProperties = object.numberArrayProperties?.map((e) => NumberArrayProperties.fromPartial(e)) || - []; - message.intArrayProperties = object.intArrayProperties?.map((e) => IntArrayProperties.fromPartial(e)) || []; - message.textArrayProperties = object.textArrayProperties?.map((e) => TextArrayProperties.fromPartial(e)) || []; - message.booleanArrayProperties = object.booleanArrayProperties?.map((e) => BooleanArrayProperties.fromPartial(e)) || - []; - message.objectProperties = object.objectProperties?.map((e) => ObjectProperties.fromPartial(e)) || []; - message.objectArrayProperties = object.objectArrayProperties?.map((e) => ObjectArrayProperties.fromPartial(e)) || - []; message.nonRefProps = (object.nonRefProps !== undefined && object.nonRefProps !== null) ? 
Properties.fromPartial(object.nonRefProps) : undefined; @@ -2800,10 +2638,6 @@ if (_m0.util.Long !== Long) { _m0.configure(); } -function isObject(value: any): boolean { - return typeof value === "object" && value !== null; -} - function isSet(value: any): boolean { return value !== null && value !== undefined; } diff --git a/src/utils/dbVersion.ts b/src/utils/dbVersion.ts index d48901bc..dff7653d 100644 --- a/src/utils/dbVersion.ts +++ b/src/utils/dbVersion.ts @@ -74,59 +74,10 @@ export class DbVersionSupport { this.dbVersionProvider.getVersion().then((version) => { return { version: version, - supports: version.isAtLeast(1, 23, 7), - message: this.errorMessage('The gRPC API', version.show(), '1.23.7'), - }; - }); - - supportsHNSWAndBQ = () => - this.dbVersionProvider.getVersion().then((version) => { - return { - version: version, - supports: version.isAtLeast(1, 24, 0), - message: this.errorMessage('HNSW index and BQ quantizer', version.show(), '1.24.0'), - }; - }); - - supportsBm25AndHybridGroupByQueries = () => - this.dbVersionProvider.getVersion().then((version) => { - return { - version: version, - supports: version.isAtLeast(1, 25, 0), - message: (query: 'Bm25' | 'Hybrid') => - this.errorMessage(`GroupBy with ${query}`, version.show(), '1.25.0'), - }; - }); - - supportsHybridNearTextAndNearVectorSubsearchQueries = () => { - return this.dbVersionProvider.getVersion().then((version) => { - return { - version: version, - supports: version.isAtLeast(1, 25, 0), - message: this.errorMessage('Hybrid nearText/nearVector subsearching', version.show(), '1.25.0'), - }; - }); - }; - - supports125ListValue = () => { - return this.dbVersionProvider.getVersion().then((version) => { - return { - version: version, - supports: version.isAtLeast(1, 25, 0), - message: undefined, - }; - }); - }; - - supportsNamedVectors = () => { - return this.dbVersionProvider.getVersion().then((version) => { - return { - version: version, - supports: version.isAtLeast(1, 24, 0), - message: 
this.errorMessage('Named vectors', version.show(), '1.24.0'), + supports: version.isAtLeast(1, 27, 0), + message: this.errorMessage('The gRPC API', version.show(), '1.27.0'), }; }); - }; requiresNamedVectorsInsertFix = () => { return this.dbVersionProvider.getVersion().then((version) => { @@ -146,16 +97,6 @@ export class DbVersionSupport { }); }; - supportsTenantsGetGRPCMethod = () => { - return this.dbVersionProvider.getVersion().then((version) => { - return { - version: version, - supports: version.isAtLeast(1, 25, 0), - message: this.errorMessage('Tenants get method over gRPC', version.show(), '1.25.0'), - }; - }); - }; - supportsTenantGetRESTMethod = () => this.dbVersionProvider.getVersion().then((version) => ({ version: version, @@ -163,60 +104,6 @@ export class DbVersionSupport { message: this.errorMessage('Tenant get method over REST', version.show(), '1.28.0'), })); - supportsDynamicVectorIndex = () => { - return this.dbVersionProvider.getVersion().then((version) => { - return { - version: version, - supports: version.isAtLeast(1, 25, 0), - message: this.errorMessage('Dynamic vector index', version.show(), '1.25.0'), - }; - }); - }; - - supportsMultiTargetVectorSearch = () => { - return this.dbVersionProvider.getVersion().then((version) => { - return { - version: version, - supports: version.isAtLeast(1, 26, 0), - message: this.errorMessage('Multi-target vector search', version.show(), '1.26.0'), - }; - }); - }; - - supportsMultiVectorSearch = () => { - return this.dbVersionProvider.getVersion().then((version) => { - return { - version: version, - supports: version.isAtLeast(1, 26, 0), - message: this.errorMessage('Multi-vector search', version.show(), '1.26.0'), - }; - }); - }; - - supportsMultiVectorPerTargetSearch = () => { - return this.dbVersionProvider.getVersion().then((version) => { - return { - version: version, - supports: version.isAtLeast(1, 27, 0), - message: this.errorMessage('Multi-vector-per-target search', version.show(), '1.27.0'), - }; - 
}); - }; - - supportsMultiWeightsPerTargetSearch = () => { - return this.dbVersionProvider.getVersion().then((version) => { - return { - version: version, - supports: version.isAtLeast(1, 27, 0), - message: this.errorMessage( - 'Multi-target vector search with multiple weights', - version.show(), - '1.27.0' - ), - }; - }); - }; - supportsAggregateGRPC = () => { return this.dbVersionProvider.getVersion().then((version) => { return { @@ -236,6 +123,7 @@ export class DbVersionSupport { }; }); }; + supportsSingleGrouped = () => this.dbVersionProvider.getVersion().then((version) => ({ version,