diff --git a/packages/sdk-codegen-scripts/src/reformatter.ts b/packages/sdk-codegen-scripts/src/reformatter.ts index de13b6094..04115e1fe 100644 --- a/packages/sdk-codegen-scripts/src/reformatter.ts +++ b/packages/sdk-codegen-scripts/src/reformatter.ts @@ -295,6 +295,28 @@ class GoFormatter extends BaseFormatter { } } +class ProtoFormatter extends BaseFormatter { + constructor() { + super('Protobuf') + } + + versionStamp() { + return warn('Skipping SDK version updating - not implemented for Protobuf.') + } +} + +class GrpcProxyFormatter extends BaseFormatter { + constructor() { + super('Grpc') + } + + versionStamp() { + return warn( + 'Skipping SDK version updating - not implemented for Grpc proxy.' + ) + } +} + type IFormatFiles = { [key: string]: string[] } type IFormatters = { [key: string]: IReformat } @@ -306,6 +328,8 @@ const fileFormatters: IFormatters = { '.swift': new SwiftFormatter(), '.ts': new TypescriptFormatter(), '.go': new GoFormatter(), + '.proto': new ProtoFormatter(), + '.java': new GrpcProxyFormatter(), } export class FilesFormatter { diff --git a/packages/sdk-codegen/src/codeGenerators.ts b/packages/sdk-codegen/src/codeGenerators.ts index 22099fe4c..e74ab6cab 100644 --- a/packages/sdk-codegen/src/codeGenerators.ts +++ b/packages/sdk-codegen/src/codeGenerators.ts @@ -32,6 +32,8 @@ import { SwiftGen } from './swift.gen' import { PythonGen } from './python.gen' import { TypescriptGen } from './typescript.gen' import { GoGen } from './go.gen' +import { ProtoGen } from './proto.gen' +import { GrpcProxyGen } from './grpc_proxy.gen' export interface IGeneratorSpec { /** source code file extension regex */ @@ -101,6 +103,18 @@ export const Generators: Array = [ options: '-papiPackage=Looker -ppackageName=looker', extension: /\.php/gi, }, + { + factory: (api: ApiModel, versions?: IVersionInfo) => + new ProtoGen(api, versions), + language: 'Protobuf', + extension: /\.proto/gi, + }, + { + factory: (api: ApiModel, versions?: IVersionInfo) => + new GrpcProxyGen(api, versions), + language: 'GrpcProxy', + extension: /\.java/gi, + }, // { // language: 'R', // legacy: 'r' diff --git a/packages/sdk-codegen/src/grpc_proxy.gen.spec.ts b/packages/sdk-codegen/src/grpc_proxy.gen.spec.ts new file mode 100644 index 000000000..eb3020e93 --- /dev/null +++ b/packages/sdk-codegen/src/grpc_proxy.gen.spec.ts @@ -0,0 +1,52 @@ +/* + + MIT License + + Copyright (c) 2020 Looker Data Sciences, Inc. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. 
+ + */ + +import { TestConfig } from './testUtils' +import { GrpcProxyGen } from './grpc_proxy.gen' + +const config = TestConfig() +const apiTestModel = config.apiTestModel + +const gen = new GrpcProxyGen(apiTestModel) + +describe('pseudocode', () => { + describe('method signature', () => { + it('optional body and additional param', () => { + const method = apiTestModel.methods.create_user_credentials_email + expect(method).toBeDefined() + const expected = `create_user_credentials_email(user_id, body, fields): CredentialsEmail` + const actual = gen.methodSignature('', method) + expect(actual).toEqual(expected) + }) + it('no params', () => { + const method = apiTestModel.methods.all_datagroups + expect(method).toBeDefined() + const expected = `all_datagroups(): Datagroup[]` + const actual = gen.methodSignature('', method) + expect(actual).toEqual(expected) + }) + }) +}) diff --git a/packages/sdk-codegen/src/grpc_proxy.gen.ts b/packages/sdk-codegen/src/grpc_proxy.gen.ts new file mode 100644 index 000000000..67b0cde8f --- /dev/null +++ b/packages/sdk-codegen/src/grpc_proxy.gen.ts @@ -0,0 +1,288 @@ +/* + + MIT License + + Copyright (c) 2020 Looker Data Sciences, Inc. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. 
+ + */ + +import { CodeGen } from './codeGen' +import { + IMethod, + IParameter, + IProperty, + IType, + titleCase, + camelCase, +} from './sdkModels' + +// eslint-disable @typescript-eslint/no-unused-vars + +/** + * Pseudocde generator + */ +export class GrpcProxyGen extends CodeGen { + codePath = './proto/grpc_proxy/src/main/java/com/google/looker/server' + packagePath = '' + sdkPath = 'sdk' + itself = '' + fileExtension = '.java' + commentStr = '// ' + nullStr = 'null' + transport = 'transport' + + argDelimiter = ', ' + paramDelimiter = ',\n' + propDelimiter = '\n' + codeQuote = '"' + enumDelimiter = ',\n' + + indentStr = ' ' + endTypeStr = '\n}' + needsRequestTypes = false + willItStream = true + + private readonly defaultApi = '4.0' + + isDefaultApi() { + return this.apiVersion === this.defaultApi + } + + supportsMultiApi() { + return false + } + + sdkFileName(baseFileName: string) { + if (baseFileName === 'streams') { + return this.fileName('sdk/LookerStreamingServiceImpl') + } else if (baseFileName === 'models') { + return this.fileName('sdk/LookerModels') + } else { + return this.fileName('sdk/LookerServiceImpl') + } + } + + /** + * Grpc Proxy Server generator + * + * @param {string} indent indentation for code + * @param {IMethod} method for signature + * @returns {string} prototype declaration of method + */ + methodSignature(indent: string, method: IMethod): string { + indent = '' + const params = method.allParams + const args = params.map((p) => p.name) + return `${indent}${method.operationId}(${args.join(', ')}): ${ + method.primaryResponse.type.name + }` + } + + construct(_indent: string, _type: IType): string { + return '' + } + + declareMethod(_indent: string, _method: IMethod): string { + const titleMethodName = titleCase(_method.operationId) + const camelMethodName = camelCase(_method.operationId) + return `${this.formatJavaDoc(_method.description)} + @Override + public void ${camelMethodName}(${titleMethodName}Request request, StreamObserver<${titleMethodName}Response> responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.${_method.httpMethod.toLowerCase()}("${ + _method.endpoint + }", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ${titleMethodName}Response.Builder responseBuilder = ${titleMethodName}Response.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + ` + } + + declareStreamer(_indent: string, _method: IMethod): string { + const titleMethodName = titleCase(_method.operationId) + const camelMethodName = camelCase(_method.operationId) + const returnCanStream = _method.returnType?.type.name.endsWith('[]') + const streamResponse = _method.returnType?.type.name.endsWith('[]') + ? 'Stream' + : '' + const onNext = returnCanStream + ? 
`responseBuilder.getResultList().forEach(entry -> { + ${titleMethodName}StreamResponse.Builder responseBuilder2 = ${titleMethodName}StreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + });` + : `responseObserver.onNext(responseBuilder.build());` + return `${this.formatJavaDoc(_method.description)} + @Override + public void ${camelMethodName}(${titleMethodName}Request request, StreamObserver<${titleMethodName}${streamResponse}Response> responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.${_method.httpMethod.toLowerCase()}("${ + _method.endpoint + }", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ${titleMethodName}Response.Builder responseBuilder = ${titleMethodName}Response.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + ${onNext} + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + ` + } + + declareParameter( + _indent: string, + _method: IMethod, + _param: IParameter + ): string { + return '' + } + + declareProperty(_indent: string, _property: IProperty): string { + return '' + } + + encodePathParams(_indent: string, _method: IMethod): string { + return '' + } + + methodsEpilogue(_indent: string): string { + return '}' + } + + methodsPrologue(_indent: string): string { + return this.servicesPrologue('LookerService') + } + + streamsPrologue(_indent: string): string { + return this.servicesPrologue('LookerStreamingService') + } + + modelsEpilogue(_indent: string): string { + return '}' + } + + modelsPrologue(_indent: string): string { + return ` +package com.google.looker.server.sdk; + +// DELETE THIS FILE - NOT REQUIRED + +public class LookerModels { + ` + } + + beginRegion(_: string, description: string): string { + return ` //#region ${description}` + } + + endRegion(_: string, description: string): string { + return ` //#endregion ${description}` + } + + declareType() { + return '' + } + + summary(_indent: string, _text: string | undefined): string { + return '' + } + + typeSignature(_indent: string, _type: IType): string { + return '' + } + + private servicesPrologue(serviceName: string) { + return ` +package com.google.looker.server.sdk; + +import com.google.looker.grpc.services.*; +import com.google.looker.grpc.services.${serviceName}Grpc.${serviceName}ImplBase; +import com.google.looker.server.rtl.LookerClient; +import com.google.looker.server.rtl.LookerClientResponse; +import com.google.protobuf.InvalidProtocolBufferException; +import com.google.protobuf.util.JsonFormat; +import io.grpc.Status; +import io.grpc.stub.StreamObserver; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class ${serviceName}Impl extends ${serviceName}ImplBase { + + final private static Logger LOGGER = LoggerFactory.getLogger(${serviceName}Impl.class); + + final private LookerClient lookerClient; + + public ${serviceName}Impl() { + lookerClient = new LookerClient("${this.apiVersion}"); + } + + ` + } + + private formatJavaDoc(comments: string) { + if (comments.trim().length === 
0) { + return '' + } else { + const lines = comments.split('\n').map((part) => ` * ${part}\n`) + lines.unshift(' /**\n') + lines.push(' */') + return lines.join('') + } + } +} diff --git a/packages/sdk-codegen/src/proto.gen.spec.ts b/packages/sdk-codegen/src/proto.gen.spec.ts new file mode 100644 index 000000000..c027f66aa --- /dev/null +++ b/packages/sdk-codegen/src/proto.gen.spec.ts @@ -0,0 +1,52 @@ +/* + + MIT License + + Copyright (c) 2020 Looker Data Sciences, Inc. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + + */ + +import { TestConfig } from './testUtils' +import { ProtoGen } from './proto.gen' + +const config = TestConfig() +const apiTestModel = config.apiTestModel + +const gen = new ProtoGen(apiTestModel) + +describe('ProtoGen', () => { + describe('method signature', () => { + it('optional body and additional param', () => { + const method = apiTestModel.methods.create_user_credentials_email + expect(method).toBeDefined() + const expected = `create_user_credentials_email(user_id, body, fields): CredentialsEmail` + const actual = gen.methodSignature('', method) + expect(actual).toEqual(expected) + }) + it('no params', () => { + const method = apiTestModel.methods.all_datagroups + expect(method).toBeDefined() + const expected = `all_datagroups(): Datagroup[]` + const actual = gen.methodSignature('', method) + expect(actual).toEqual(expected) + }) + }) +}) diff --git a/packages/sdk-codegen/src/proto.gen.ts b/packages/sdk-codegen/src/proto.gen.ts new file mode 100644 index 000000000..3f01b3132 --- /dev/null +++ b/packages/sdk-codegen/src/proto.gen.ts @@ -0,0 +1,339 @@ +/* + + MIT License + + Copyright (c) 2020 Looker Data Sciences, Inc. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. + + */ + +import { CodeGen } from './codeGen' +import { + IMethod, + IParameter, + IProperty, + IType, + EnumType, + snakeCase, + stringToHashCode, + titleCase, +} from './sdkModels' + +// eslint-disable @typescript-eslint/no-unused-vars + +/** + * Protobuf and grpc generator + */ +export class ProtoGen extends CodeGen { + saveMethods: IMethod[] = [] + codePath = './proto/grpc_proxy/src/main/proto' + packagePath = '' + sdkPath = 'sdk' + itself = '' + fileExtension = '.proto' + commentStr = '// ' + nullStr = 'null' + transport = 'transport' + + argDelimiter = ', ' + paramDelimiter = ',\n' + propDelimiter = '\n' + codeQuote = '"' + enumDelimiter = ',\n' + + indentStr = ' ' + endTypeStr = '\n}' + needsRequestTypes = false + willItStream = true + + private readonly defaultApi = '4.0' + + isDefaultApi() { + return this.apiVersion === this.defaultApi + } + + supportsMultiApi() { + return false + } + + sdkFileName(baseFileName: string) { + return this.fileName(`sdk/${baseFileName}`) + } + + /** + * grpc signature generator + * + * @param {string} indent indentation for code + * @param {IMethod} method for signature + * @returns {string} prototype declaration of method + */ + methodSignature(indent: string, method: IMethod): string { + indent = '' + const params = method.allParams + const args = params.map((p) => p.name) + return `${indent}${method.operationId}(${args.join(', ')}): ${ + method.primaryResponse.type.name + }` + } + + construct(_indent: string, _type: IType): string { + return '' + } + + declareMethod(_indent: string, _method: IMethod): string { + this.saveMethods.push(_method) + const methodName = titleCase(_method.operationId) + return `${this.formatComments( + _method.description + )} rpc ${methodName}(${methodName}Request) returns (${methodName}Response);` + } + + declareStreamer(_indent: string, _method: IMethod): string { + const methodName = titleCase(_method.operationId) + const streamResponse = _method.returnType?.type.name.endsWith('[]') + ? 
'Stream' + : '' + return `${this.formatComments( + _method.description + )} rpc ${methodName}(${methodName}Request) returns (stream ${methodName}${streamResponse}Response);` + } + + declareParameter( + _indent: string, + _method: IMethod, + _param: IParameter + ): string { + return '' + } + + declareProperty(_indent: string, _property: IProperty): string { + return `${this.formatComments(_property.description)} ${this.mapType( + _property.type.name + )} ${_property.name} = ${this.generateIdentifier(_property.name)};\n` + } + + encodePathParams(_indent: string, _method: IMethod): string { + return '' + } + + methodsEpilogue(_indent: string): string { + return '}' + } + + methodsPrologue(_indent: string): string { + return this.servicesPrologue('LookerService') + } + + streamsPrologue(_indent: string): string { + return this.servicesPrologue('LookerStreamingService') + } + + modelsEpilogue(_indent: string): string { + return this.saveMethods + .map((method) => { + const isStreamResponse = method.returnType?.type.name.endsWith('[]') + let streamResponse = '' + if (isStreamResponse) { + console.log( + `stream response ${method.name} ${method.returnType?.type.name}` + ) + streamResponse = ` + +message ${titleCase(method.operationId)}StreamResponse { + ${this.methodResponse(method, 1, true).trim()} +}` + } + return ` +message ${titleCase(method.operationId)}Request { +${this.methodArguments(method)} +} + +message ${titleCase(method.operationId)}Response { + ${this.methodResponse(method, 1).trim()} +}${streamResponse} +` + }) + .join('') + } + + modelsPrologue(_indent: string): string { + return ` +syntax = "proto3"; + +package looker; + +option java_package = "com.google.looker.grpc.services"; +option java_multiple_files = true; + +import "google/protobuf/any.proto"; +import "google/protobuf/timestamp.proto"; + ` + } + + summary(_indent: string, _text: string | undefined): string { + return '' + } + + typeSignature(_indent: string, _type: IType): string { + return '' + } + + beginRegion(_: string, description: string): string { + return ` // ${description}` + } + + endRegion(): string { + return '' + } + + declareType(_: string, type: IType) { + let propertyValues = '' + let typeType + if (type instanceof EnumType) { + typeType = 'enum' + const num = type as EnumType + const typeName = snakeCase(type.name).toUpperCase() + const enumValues = num.values.map((enumType) => { + const enumName = `${typeName}_${enumType.toString().toUpperCase()}` + return ` ${enumName} = ${this.generateIdentifier(enumName)};\n` + }) + enumValues.unshift(` _${typeName}_UNSET = 0;\n`) + propertyValues = enumValues.join('') + } else { + typeType = 'message' + propertyValues = Object.values(type.properties) + .map((prop) => { + return this.declareProperty('', prop) + }) + .join('') + } + + return `${this.formatComments(type.description).trim()} +${typeType} ${type.name} { + ${propertyValues.trim()} +}` + } + + private servicesPrologue(serviceName: string) { + return ` +syntax = "proto3"; + +package looker; + +option java_package = "com.google.looker.grpc.services"; +option java_multiple_files = true; + +import 'sdk/models.proto'; + +service ${serviceName} { + ` + } + + private formatComments(comments: string) { + return comments.trim().length === 0 + ? 
'' + : comments + .split('\n') + .map((part) => ` // ${part}\n`) + .join('') + } + + private methodArguments(method: IMethod) { + return ( + ' ' + + method.allParams + .map((param) => { + return `${this.formatComments(param.description)} ${this.mapType( + param.type.name + )} ${param.name} = ${this.generateIdentifier(param.name)};\n` + }) + .join('') + .trim() + ) + } + + private methodResponse( + method: IMethod, + index: number, + isStreamResponse = false + ) { + if (method.returnType) { + const description = this.formatComments( + method.returnType?.description || '' + ) + const returnType = method.returnType.type.name + if (returnType === 'void') { + return `${description}` + } else { + return `${description} ${this.mapType( + returnType, + isStreamResponse + )} result = ${index};\n` + } + } else { + return '' + } + } + + private mapType(type: string, isStreamResponse = false): string { + if (type.startsWith('Hash[')) { + return `map` + } else if (type.endsWith('[]')) { + if (isStreamResponse) { + return `${type.substring(0, type.length - 2)}` + } else { + return `repeated ${type.substring(0, type.length - 2)}` + } + } else if (type === 'boolean') { + return 'bool' + } else if (type === 'datetime') { + return 'google.protobuf.Timestamp' + } else if (type === 'any') { + return 'google.protobuf.Any' + } else if (type === 'uri') { + return 'string' + } else if (type.startsWith('DelimArray')) { + // TODO handle this better + return 'string' + } else { + return type + } + } + + // Not convinced about this implementation but will do + // for now. Originally used the index of property in + // javascript object but this is a little brittle as + // there is no guarantee a developer will not insert + // a new property into the object. This generates a + // consistent value across runs. The problem is that the + // value MUST be between 0 and 536870911. To fix this + // negative values are multipled by -1. Values greater + // than 536870911 are bitwise shift right until they are + // less than equal to 536870911. So far their have been + // no collisions but I suspect there are better implementations. + private generateIdentifier(name: string): number { + let hashCode = stringToHashCode(name) + hashCode = hashCode < 0 ? hashCode * -1 : hashCode + while (hashCode > 536870911) { + hashCode = hashCode >> 1 + } + return hashCode + } +} diff --git a/packages/sdk-codegen/src/sdkModels.ts b/packages/sdk-codegen/src/sdkModels.ts index 1bbb3c7c3..165337feb 100644 --- a/packages/sdk-codegen/src/sdkModels.ts +++ b/packages/sdk-codegen/src/sdkModels.ts @@ -139,6 +139,16 @@ export const titleCase = (value: string) => { return value[0].toLocaleUpperCase() + value.substr(1) } +/** + * convert string to snake_case + * @param value string value to convert to snake_case + */ +export const snakeCase = (value: string) => { + if (!value) return '' + value = value.charAt(0).toLowerCase() + value.substring(1) + return value.replace(/[A-Z]/g, (letter) => `_${letter.toLowerCase()}`) +} + /** * Only first character of string should be uppercase * @@ -153,6 +163,18 @@ export const firstCase = (value: string) => { return value[0].toLocaleUpperCase() + value.substr(1).toLocaleLowerCase() } +/** + * Derive a hash from a string. 
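+ * Uses the same recurrence as Java's String.hashCode ((h << 5) - h === 31 * h), truncated to a signed 32-bit integer, so the result is deterministic across runs.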
+ * @param s string to hash + */ +export const stringToHashCode = (s: string): number => { + let h = 0 + for (let n = 0; n < s.length; n++) { + h = ((h << 5) - h + s.charCodeAt(n)) | 0 + } + return h +} + export interface IModel {} /** diff --git a/proto/grpc_proxy/.env_sample b/proto/grpc_proxy/.env_sample new file mode 100644 index 000000000..6282f2a48 --- /dev/null +++ b/proto/grpc_proxy/.env_sample @@ -0,0 +1,13 @@ +GRPC_SERVER_HOST=localhost +GRPC_SERVER_LISTEN_PORT=50051 +CERT_CHAIN_FILE=ssl/server.crt +PRIVATE_KEY_FILE=ssl/server.pem +TRUST_MANAGER_FILE="ssl/ca.crt" +LOOKER_CLIENT_ID= +LOOKER_CLIENT_SECRET= +LOOKER_BASE_URL=https://self-signed.looker.com:19999 +LOOKER_VERIFY_SSL=false +TEST_LOOKER_USERNAME= +TEST_LOOKER_PASSWORD= +TEST_CONNECTION_NAME= + diff --git a/proto/grpc_proxy/.gitignore b/proto/grpc_proxy/.gitignore new file mode 100644 index 000000000..dfaf3ed14 --- /dev/null +++ b/proto/grpc_proxy/.gitignore @@ -0,0 +1,6 @@ +.env +.gradle +build +out +ssl +src/main/java/com/google/looker/server/sdk/LookerModels.java diff --git a/proto/grpc_proxy/README.md b/proto/grpc_proxy/README.md new file mode 100644 index 000000000..7b5dae9d4 --- /dev/null +++ b/proto/grpc_proxy/README.md @@ -0,0 +1,94 @@ +# Looker GRPC Proxy Server + +GRPC proxy server to a Looker instance. +1. Listens for GRPC requests. +2. Converts GRPC request to Looker JSON. +3. Calls Looker JSON rest endpoint. +4. Converts rest response to GRPC response. +5. Returns GRPC response. + +## Setup + +Install protobuf generator, `protoc`. +`brew install protobuf` + +File formatter. +`brew install clang-format` + +### SSL setup for local testing + +Run `scripts/ssl_setup.sh` + +### Generate protobuf definitions and java implementation (TODO) + +Run `scripts/gen_protobuf.sh` + +### .env setup + +Prior to server startup create a `.env` file in the root of this project (note +that it should not be added to source control). A sample file, `.env_sample`, +contains the entries required. + +``` +# host grpc server listens on. Used by the grpc client. +GRPC_SERVER_HOST=localhost +# port grpc server listens on. Used by grpc server and client. +GRPC_SERVER_LISTEN_PORT=50051 +# Certificate chain file. Used by server to support SSL setup for development. +CERT_CHAIN_FILE=ssl/server.crt +# Private key file. Used by server to support SSL setup for development. +PRIVATE_KEY_FILE=ssl/server.pem +# Trust manager file. Used by client to support SSL setup for development. +TRUST_MANAGER_FILE="ssl/ca.crt" +# Looker client id +LOOKER_CLIENT_ID= +# Looker client secret +LOOKER_CLIENT_SECRET= +# Looker server base url +LOOKER_BASE_URL=https://self-signed.looker.com:19999 +# Verify ssl. Set to false for development environmet +LOOKER_VERIFY_SSL=false +# Looker connection db username - used by ConnectionTests +TEST_LOOKER_USERNAME= +# Looker connection db password - used by ConnectionTests +TEST_LOOKER_PASSWORD= +# Looker connection name - used by ConnectionTests to test a connection +TEST_CONNECTION_NAME= +``` + +## Notes + +### Protobuf identifier generation + +Not convinced about the implementation that generates protobuf identifiers but +it will do for now. Originally it used the index of the property in javascript +object but this is a little brittle as there is no guarantee a developer will +not insert a new property into the object. This generates a consistent value +across runs. The problem is that the identifier MUST be between 0 and 536870911. 
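+(536870911 is 2^29 - 1, the largest field number proto3 permits.)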
+To fix this negative values are multipled by -1 and values greater than +536870911 are bitwise shifted right until they are less than or equal to +536870911. So far their have been no collisions but I suspect there are better +implementations. + +## TODOs + +In no particular order of importance. + +1. Streaming support. +2. Add rest endpoint to protobuf files. +3. Sync Looker server environment variable names with other implementations. +4. Handle response content types other that JSON. +5. Industrialize generation of proto ids (handle slight possibility of duplicates). +Verify or improve current id generator. +6. Tests for generators. +7. Tests for java support functions. +8. Add support for productionized SSL connection. +9. Separate client into another project. Tests utilizing should go with the client. +10. Consider creating a new runtime that can be embedded in helltool. Note sure it +can be done. Basically it would call the internal ruby API endpoint and negate the +need for an extra network hop. Gets http2 for free? +11. Finish authentication methods. +12. Implement refresh token. +13. Rewrite setup script in typescript and to codegen scripts package. + + diff --git a/proto/grpc_proxy/build.gradle b/proto/grpc_proxy/build.gradle new file mode 100644 index 000000000..16bf27845 --- /dev/null +++ b/proto/grpc_proxy/build.gradle @@ -0,0 +1,59 @@ +plugins { + id 'java' + id 'com.google.protobuf' version '0.8.14' + id 'idea' +} + +group 'com.google.looker' +version '1.0-SNAPSHOT' + +repositories { + mavenCentral() +} + +protobuf { + protoc { + artifact = "com.google.protobuf:protoc:3.12.0" + } + plugins { + grpc { + artifact = 'io.grpc:protoc-gen-grpc-java:1.34.1' + } + } + generateProtoTasks { + all()*.plugins { + grpc {} + } + } +} + +dependencies { + implementation 'io.grpc:grpc-netty-shaded:1.34.1' + implementation 'io.grpc:grpc-protobuf:1.34.1' + implementation 'io.grpc:grpc-stub:1.34.1' + implementation 'org.apache.commons:commons-lang3:3.11' + implementation 'com.google.protobuf:protobuf-java-util:3.14.0' + implementation 'io.github.cdimascio:java-dotenv:5.2.2' + implementation "io.ktor:ktor-client:$ktorVersion" + implementation "io.ktor:ktor-client-okhttp:$ktorVersion" + implementation "io.ktor:ktor-client-json:$ktorVersion" + implementation "io.ktor:ktor-client-jackson:$ktorVersion" + implementation 'org.slf4j:slf4j-api:1.7.30' + implementation 'org.slf4j:slf4j-simple:1.7.30' + + testImplementation 'org.junit.jupiter:junit-jupiter-api:5.6.0' + testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine' +} + +test { + useJUnitPlatform() +} + +// if you have source imports issues, add the below +sourceSets.main.java.srcDir new File(buildDir, 'generated/source') +idea { + module { + // Marks the already(!) 
added srcDir as "generated" + generatedSourceDirs += file('build/generated/source') + } +} diff --git a/proto/grpc_proxy/gradle.properties b/proto/grpc_proxy/gradle.properties new file mode 100644 index 000000000..f88853c87 --- /dev/null +++ b/proto/grpc_proxy/gradle.properties @@ -0,0 +1,3 @@ +org.gradle.daemon=true +org.gradle.jvmargs=-Xmx2560m +ktorVersion=1.4.2 diff --git a/proto/grpc_proxy/gradle/wrapper/gradle-wrapper.jar b/proto/grpc_proxy/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 000000000..f3d88b1c2 Binary files /dev/null and b/proto/grpc_proxy/gradle/wrapper/gradle-wrapper.jar differ diff --git a/proto/grpc_proxy/gradle/wrapper/gradle-wrapper.properties b/proto/grpc_proxy/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 000000000..ef503b1c3 --- /dev/null +++ b/proto/grpc_proxy/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Sun Jan 10 17:12:20 PST 2021 +distributionUrl=https\://services.gradle.org/distributions/gradle-6.1.1-all.zip +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStorePath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME diff --git a/proto/grpc_proxy/gradlew b/proto/grpc_proxy/gradlew new file mode 100755 index 000000000..2fe81a7d9 --- /dev/null +++ b/proto/grpc_proxy/gradlew @@ -0,0 +1,183 @@ +#!/usr/bin/env sh + +# +# Copyright 2015 the original author or authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. +while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >/dev/null +APP_HOME="`pwd -P`" +cd "$SAVED" >/dev/null + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn () { + echo "$*" +} + +die () { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; + NONSTOP* ) + nonstop=true + ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! 
-x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? -ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin or MSYS, switch paths to Windows format before running java +if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + JAVACMD=`cygpath --unix "$JAVACMD"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=`expr $i + 1` + done + case $i in + 0) set -- ;; + 1) set -- "$args0" ;; + 2) set -- "$args0" "$args1" ;; + 3) set -- "$args0" "$args1" "$args2" ;; + 4) set -- "$args0" "$args1" "$args2" "$args3" ;; + 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Escape application args +save () { + for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done + echo " " +} +APP_ARGS=`save "$@"` + +# Collect all arguments for the java command, following the shell quoting and substitution rules +eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" + +exec "$JAVACMD" "$@" diff --git a/proto/grpc_proxy/gradlew.bat b/proto/grpc_proxy/gradlew.bat new file mode 100644 index 000000000..24467a141 --- /dev/null +++ b/proto/grpc_proxy/gradlew.bat @@ -0,0 +1,100 @@ +@rem +@rem Copyright 2015 the original author or authors. 
+@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. +@rem + +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto init + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:init +@rem Get command-line arguments, handling Windows variants + +if not "%OS%" == "Windows_NT" goto win9xME_args + +:win9xME_args +@rem Slurp the command line arguments. +set CMD_LINE_ARGS= +set _SKIP=2 + +:win9xME_args_slurp +if "x%~1" == "x" goto execute + +set CMD_LINE_ARGS=%* + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/proto/grpc_proxy/scripts/gen_protobuf.sh b/proto/grpc_proxy/scripts/gen_protobuf.sh new file mode 100644 index 000000000..92a9b33b6 --- /dev/null +++ b/proto/grpc_proxy/scripts/gen_protobuf.sh @@ -0,0 +1,7 @@ +#!/bin/bash +cd ../../.. 
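+# Run the Protobuf and GrpcProxy generators from the repo root, then remove the generated 4.0 directories and the unneeded LookerModels.java placeholder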
+yarn sdk Protobuf +rmdir proto/grpc_proxy/src/main/proto/sdk/4.0 +yarn sdk GrpcProxy +rmdir proto/grpc_proxy/src/main/java/com/google/looker/server/sdk/4.0 +rm proto/grpc_proxy/src/main/java/com/google/looker/server/sdk/LookerModels.java diff --git a/proto/grpc_proxy/scripts/gen_protobuf_openapi.sh b/proto/grpc_proxy/scripts/gen_protobuf_openapi.sh new file mode 100644 index 000000000..0cf9e74c1 --- /dev/null +++ b/proto/grpc_proxy/scripts/gen_protobuf_openapi.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +# Not used as not ready for prime time. Use as reference for the Looker protobuf generator. + +openapi-generator generate -g protobuf-schema -o ../src/main/proto2 -i ../../../spec/Looker.4.0.oas.json --package-name looker --additional-properties=identifierNamingConvention=snake_case diff --git a/proto/grpc_proxy/scripts/ssl_setup.sh b/proto/grpc_proxy/scripts/ssl_setup.sh new file mode 100644 index 000000000..bd1391479 --- /dev/null +++ b/proto/grpc_proxy/scripts/ssl_setup.sh @@ -0,0 +1,11 @@ +#!/bin/bash +cd .. +test ! -d ssl && mkdir ssl +cd ssl +SERVER_CN=localhost +openssl genrsa -passout pass:1111 -des3 -out ca.key 4096 +openssl req -passin pass:1111 -new -x509 -days 365 -key ca.key -out ca.crt -subj "/CN=${SERVER_CN}" +openssl genrsa -passout pass:1111 -des3 -out server.key 4096 +openssl req -passin pass:1111 -new -key server.key -out server.csr -subj "/CN=${SERVER_CN}" +openssl x509 -req -passin pass:1111 -days 365 -in server.csr -CA ca.crt -CAkey ca.key -set_serial 01 -out server.crt +openssl pkcs8 -topk8 -nocrypt -passin pass:1111 -in server.key -out server.pem diff --git a/proto/grpc_proxy/settings.gradle b/proto/grpc_proxy/settings.gradle new file mode 100644 index 000000000..096d59181 --- /dev/null +++ b/proto/grpc_proxy/settings.gradle @@ -0,0 +1,2 @@ +rootProject.name = 'grpc_proxy' + diff --git a/proto/grpc_proxy/src/main/java/com/google/looker/client/LookerGrpcClient.java b/proto/grpc_proxy/src/main/java/com/google/looker/client/LookerGrpcClient.java new file mode 100644 index 000000000..5ee6c2f41 --- /dev/null +++ b/proto/grpc_proxy/src/main/java/com/google/looker/client/LookerGrpcClient.java @@ -0,0 +1,150 @@ +package com.google.looker.client; + +import com.google.looker.common.BearerToken; +import com.google.looker.common.Constants; +import com.google.looker.grpc.services.AccessToken; +import com.google.looker.grpc.services.LoginRequest; +import com.google.looker.grpc.services.LoginResponse; +import com.google.looker.grpc.services.LogoutRequest; +import com.google.looker.grpc.services.LookerServiceGrpc; +import com.google.looker.grpc.services.LookerStreamingServiceGrpc; +import com.google.looker.grpc.services.PingServiceGrpc; +import io.github.cdimascio.dotenv.Dotenv; +import io.grpc.ManagedChannel; +import io.grpc.netty.shaded.io.grpc.netty.GrpcSslContexts; +import io.grpc.netty.shaded.io.grpc.netty.NettyChannelBuilder; +import java.io.File; +import javax.net.ssl.SSLException; +import org.apache.commons.lang3.math.NumberUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class LookerGrpcClient { + + static { + Dotenv dotenv = Dotenv.load(); + dotenv.entries().forEach(e -> System.setProperty(e.getKey(), e.getValue())); + } + + private static final Logger LOGGER = LoggerFactory.getLogger(LookerGrpcClient.class); + + private SSLException initFailure; + private ManagedChannel channel; + private PingServiceGrpc.PingServiceBlockingStub pingBlockingStub; + private LookerServiceGrpc.LookerServiceBlockingStub lookerServiceBlockingStub; + private 
LookerStreamingServiceGrpc.LookerStreamingServiceStub lookerStreamingServiceStub; + private AccessToken accessTokenResult; + + public LookerGrpcClient() { + try { + channel = NettyChannelBuilder + .forAddress( + System.getProperty(Constants.GRPC_SERVER_HOST), + NumberUtils.toInt(System.getProperty(Constants.GRPC_SERVER_LISTEN_PORT)) + ) + .sslContext( + GrpcSslContexts + .forClient() + .trustManager(new File(System.getProperty(Constants.TRUST_MANAGER_FILE)) + ).build()) + .build(); + } catch (SSLException e) { + LOGGER.error("initialization failure"); + initFailure = e; + } + } + + public PingServiceGrpc.PingServiceBlockingStub getPingBlockingStub() throws SSLException { + if (initFailure != null) { + throw initFailure; + } + if (pingBlockingStub == null) { + pingBlockingStub = PingServiceGrpc + .newBlockingStub(channel); + } + return pingBlockingStub; + } + + public LookerServiceGrpc.LookerServiceBlockingStub getLookerServiceBlockingStub() throws SSLException { + if (initFailure != null) { + throw initFailure; + } + if (lookerServiceBlockingStub == null) { + if (accessTokenResult == null) { + LOGGER.debug("create blocking stub WITHOUT credentials"); + lookerServiceBlockingStub = LookerServiceGrpc + .newBlockingStub(channel); + } else { + LOGGER.debug("create blocking stub WITH credentials: " + accessTokenResult.getAccessToken()); + BearerToken token = new BearerToken(accessTokenResult.getAccessToken()); + lookerServiceBlockingStub = LookerServiceGrpc + .newBlockingStub(channel) + .withCallCredentials(token); + } + } + return lookerServiceBlockingStub; + } + + public LookerStreamingServiceGrpc.LookerStreamingServiceStub getLookerStreamingServiceStub() throws SSLException { + if (initFailure != null) { + throw initFailure; + } + if (lookerStreamingServiceStub == null) { + if (accessTokenResult == null) { + LOGGER.debug("create blocking stub WITHOUT credentials"); + lookerStreamingServiceStub = LookerStreamingServiceGrpc + .newStub(channel); + } else { + LOGGER.debug("create blocking stub WITH credentials: " + accessTokenResult.getAccessToken()); + BearerToken token = new BearerToken(accessTokenResult.getAccessToken()); + lookerStreamingServiceStub = LookerStreamingServiceGrpc + .newStub(channel) + .withCallCredentials(token); + } + } + return lookerStreamingServiceStub; + } + + public void clearAccessToken() { + accessTokenResult = null; + lookerServiceBlockingStub = null; + lookerStreamingServiceStub = null; + } + + public void login() throws SSLException { + accessTokenResult = null; + lookerServiceBlockingStub = null; + lookerStreamingServiceStub = null; + LookerServiceGrpc.LookerServiceBlockingStub stub = getLookerServiceBlockingStub(); + LoginResponse response = stub.login( + LoginRequest + .newBuilder() + .setClientId(System.getProperty(Constants.LOOKER_CLIENT_ID)) + .setClientSecret(System.getProperty(Constants.LOOKER_CLIENT_SECRET)) + .build() + ); + accessTokenResult = response.getResult(); + lookerServiceBlockingStub = null; + } + + public void logout() throws SSLException { + if (accessTokenResult != null) { + LookerServiceGrpc.LookerServiceBlockingStub stub = getLookerServiceBlockingStub(); + accessTokenResult = null; + lookerServiceBlockingStub = null; + stub.logout( + LogoutRequest + .newBuilder() + .build() + ); + } + } + + public String getAccessToken() { + return accessTokenResult == null ? null: accessTokenResult.getAccessToken(); + } + + public long getAccessTokenExpires() { + return accessTokenResult == null ? 
-1: accessTokenResult.getExpiresIn(); + } +} diff --git a/proto/grpc_proxy/src/main/java/com/google/looker/common/BearerToken.java b/proto/grpc_proxy/src/main/java/com/google/looker/common/BearerToken.java new file mode 100644 index 000000000..1478f1a59 --- /dev/null +++ b/proto/grpc_proxy/src/main/java/com/google/looker/common/BearerToken.java @@ -0,0 +1,36 @@ +package com.google.looker.common; + +import io.grpc.CallCredentials; +import io.grpc.Metadata; +import io.grpc.Status; +import java.util.concurrent.Executor; + +public class BearerToken extends CallCredentials { + + private String value; + + public BearerToken(String value) { + this.value = value; + } + + @Override + public void applyRequestMetadata(RequestInfo requestInfo, Executor executor, MetadataApplier metadataApplier) { + executor.execute(() -> { + try { + Metadata headers = new Metadata(); + headers.put( + Constants.AUTHORIZATION_METADATA_KEY, + String.format("%s %s", Constants.BEARER_TYPE, value) + ); + metadataApplier.apply(headers); + } catch (Throwable e) { + metadataApplier.fail(Status.UNAUTHENTICATED.withCause(e)); + } + }); + } + + @Override + public void thisUsesUnstableApi() { + // noop + } +} diff --git a/proto/grpc_proxy/src/main/java/com/google/looker/common/Constants.java b/proto/grpc_proxy/src/main/java/com/google/looker/common/Constants.java new file mode 100644 index 000000000..5914be10b --- /dev/null +++ b/proto/grpc_proxy/src/main/java/com/google/looker/common/Constants.java @@ -0,0 +1,21 @@ +package com.google.looker.common; + +import static io.grpc.Metadata.ASCII_STRING_MARSHALLER; + +import io.grpc.Context; +import io.grpc.Metadata; + +public class Constants { + public static final String GRPC_SERVER_HOST = "GRPC_SERVER_HOST"; + public static final String GRPC_SERVER_LISTEN_PORT = "GRPC_SERVER_LISTEN_PORT"; + public static final String LOOKER_CLIENT_ID = "LOOKER_CLIENT_ID"; + public static final String LOOKER_CLIENT_SECRET = "LOOKER_CLIENT_SECRET"; + public static final String LOOKER_BASE_URL = "LOOKER_BASE_URL"; + public static final String LOOKER_VERIFY_SSL = "LOOKER_VERIFY_SSL"; + public static final String CERT_CHAIN_FILE = "CERT_CHAIN_FILE"; + public static final String PRIVATE_KEY_FILE = "PRIVATE_KEY_FILE"; + public static final String TRUST_MANAGER_FILE = "TRUST_MANAGER_FILE"; + public static final String BEARER_TYPE = "Bearer"; + public static final Metadata.Key AUTHORIZATION_METADATA_KEY = Metadata.Key.of("Authorization", ASCII_STRING_MARSHALLER); + public static final Context.Key CLIENT_ID_CONTEXT_KEY = Context.key("clientId"); +} diff --git a/proto/grpc_proxy/src/main/java/com/google/looker/server/Server.java b/proto/grpc_proxy/src/main/java/com/google/looker/server/Server.java new file mode 100644 index 000000000..1ce4960c6 --- /dev/null +++ b/proto/grpc_proxy/src/main/java/com/google/looker/server/Server.java @@ -0,0 +1,55 @@ +package com.google.looker.server; + +import com.google.looker.common.Constants; +import com.google.looker.server.rtl.auth.AuthorizationInterceptor; +import com.google.looker.server.rtl.ping.PingServiceImpl; +import com.google.looker.server.sdk.LookerServiceImpl; +import com.google.looker.server.sdk.LookerStreamingServiceImpl; +import io.github.cdimascio.dotenv.Dotenv; +import io.grpc.ServerBuilder; +import java.io.File; +import java.io.IOException; +import org.apache.commons.lang3.math.NumberUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class Server { + + static { + Dotenv dotenv = Dotenv.load(); + dotenv.entries().forEach(e -> 
System.setProperty(e.getKey(), e.getValue())); + } + + private static final Logger LOGGER = LoggerFactory.getLogger(Server.class); + + public void run() throws IOException, InterruptedException { + System.out.println("Server starting up"); + io.grpc.Server server = ServerBuilder + .forPort(NumberUtils.toInt(System.getProperty(Constants.GRPC_SERVER_LISTEN_PORT))) + .addService(new PingServiceImpl()) + .addService(new LookerServiceImpl()) + .addService(new LookerStreamingServiceImpl()) + .useTransportSecurity( + new File(System.getProperty(Constants.CERT_CHAIN_FILE)), + new File(System.getProperty(Constants.PRIVATE_KEY_FILE)) + ) + .intercept(new AuthorizationInterceptor()) + .build(); + server.start(); + LOGGER.info("Server running"); + Runtime.getRuntime().addShutdownHook(new Thread(() -> { + LOGGER.info("Server shutdown request received"); + server.shutdown(); + LOGGER.info("Server shutdown"); + })); + server.awaitTermination(); + } + + public static void main(String[] args) throws IOException, InterruptedException { + try { + new Server().run(); + } catch (Exception e) { + e.printStackTrace(); + } + } +} diff --git a/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/DefaultTransport.java b/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/DefaultTransport.java new file mode 100644 index 000000000..43b281437 --- /dev/null +++ b/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/DefaultTransport.java @@ -0,0 +1,201 @@ +package com.google.looker.server.rtl; + +import com.google.gson.Gson; +import com.google.gson.reflect.TypeToken; +import com.google.looker.common.Constants; +import io.grpc.Status; +import java.io.IOException; +import java.io.UnsupportedEncodingException; +import java.lang.reflect.Type; +import java.net.URLEncoder; +import java.security.KeyManagementException; +import java.security.NoSuchAlgorithmException; +import java.util.HashMap; +import java.util.Map; +import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLSocketFactory; +import javax.net.ssl.TrustManager; +import javax.net.ssl.X509TrustManager; +import okhttp3.MediaType; +import okhttp3.OkHttpClient; +import okhttp3.Request; +import okhttp3.RequestBody; +import okhttp3.Response; +import okhttp3.ResponseBody; +import org.apache.commons.lang3.ClassUtils; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +final public class DefaultTransport implements Transport { + + final private static Logger LOGGER = LoggerFactory.getLogger(DefaultTransport.class); + + private OkHttpClient client; + + public static final MediaType JSON + = MediaType.parse("application/json; charset=utf-8"); + + public LookerClientResponse request( + String apiVersion, + HttpMethod method, + String path, + String inputJson) { + String accessToken = Constants.CLIENT_ID_CONTEXT_KEY.get(); + if (accessToken == null || StringUtils.isBlank(accessToken)) { + LOGGER.debug("request ignored because no access token"); + return new LookerClientResponse(Status.NOT_FOUND); + } else { + Gson gson = new Gson(); + Type inputDataMapType = new TypeToken>() { + }.getType(); + Map inputData = gson.fromJson(inputJson, inputDataMapType); + String fullPath = makePath(apiVersion, path, inputData); + Request.Builder requestBuilder = new Request.Builder() + .url(fullPath) + .addHeader("Content-Type", "application/json") + .addHeader("Authorization", "Bearer " + accessToken) + .addHeader("x-looker-appid", "Looker GRPC Proxy Server"); + addMethod(requestBuilder, method, inputData); + Request 
request = requestBuilder.build(); + try { + Response response = getHttpClient().newCall(request).execute(); + int statusCode = response.code(); + if (statusCode > 199 && statusCode < 300) { + // TODO do not assume json + ResponseBody responseBody = response.body(); + if (responseBody == null) { + LOGGER.error("response has no body"); + return new LookerClientResponse(Status.NOT_FOUND); + } else { + String lookerResponse = responseBody.string(); + if (!(lookerResponse.startsWith("{") || lookerResponse.startsWith("["))) { + // TODO handle number or boolean instead of assuming string + lookerResponse = "\"" + lookerResponse + "\""; + } + String defaultResponse = String.format("{\"result\":%s}", lookerResponse); + LOGGER.debug("request succeeded " + defaultResponse); + return new LookerClientResponse(statusCode, defaultResponse); + } + } else { + LOGGER.debug("request failed: " + statusCode); + return new LookerClientResponse(statusCode); + } + } catch (IOException | KeyManagementException | NoSuchAlgorithmException e) { + LOGGER.error("login request failed", e); + return new LookerClientResponse(Status.INTERNAL); + } + } + } + + final public OkHttpClient getHttpClient() + throws NoSuchAlgorithmException, KeyManagementException { + if (this.client == null) { + OkHttpClient.Builder builder = new OkHttpClient.Builder(); + if (System.getProperty(Constants.LOOKER_VERIFY_SSL).equals("false")) { + final TrustManager[] trustAllCerts = new TrustManager[]{ + new X509TrustManager() { + + @Override + public void checkClientTrusted(java.security.cert.X509Certificate[] chain, + String authType) { + } + + @Override + public void checkServerTrusted(java.security.cert.X509Certificate[] chain, + String authType) { + } + + @Override + public java.security.cert.X509Certificate[] getAcceptedIssuers() { + return new java.security.cert.X509Certificate[]{}; + } + } + }; + final SSLContext sslContext = SSLContext.getInstance("SSL"); + sslContext.init(null, trustAllCerts, new java.security.SecureRandom()); + final SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory(); + builder.sslSocketFactory(sslSocketFactory, (X509TrustManager) trustAllCerts[0]) + .hostnameVerifier((hostname, session) -> true) + ; + } + this.client = builder.build(); + } + return this.client; + } + + public String makePath(String apiVersion, String path, Map inputData) { + String fullPath = String.format("%s/api/%s%s", + System.getProperty(Constants.LOOKER_BASE_URL), + apiVersion, + updatePath(path, inputData) + ); + LOGGER.debug("fullpath=" + fullPath); + return fullPath; + } + + private String updatePath(String path, Map inputData) { + Map qsMap = new HashMap<>(); + StringBuilder updatedPath = new StringBuilder(path); + for (Map.Entry entry : inputData.entrySet()) { + String key = entry.getKey(); + Object value = entry.getValue(); + // TODO the handling of the body is potentially brittle. Modify generator to make it not so. 
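+        // Scalar params that match a {name} placeholder are substituted into the path; remaining scalars become query string parameters. The body is skipped here and sent as the request payload by addMethod.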
+ if (!key.equals("body") && + value != null && + (value instanceof String || ClassUtils.isPrimitiveOrWrapper(value.getClass()))) { + String searchValue = "{" + key + "}"; + if (StringUtils.contains(updatedPath.toString(), searchValue)) { + updatedPath = new StringBuilder(StringUtils + .replace(updatedPath.toString(), searchValue, value.toString())); + } else { + qsMap.put(entry.getKey(), entry.getValue().toString()); + } + } + } + if (qsMap.size() > 0) { + String sep = "?"; + for (Map.Entry entry : qsMap.entrySet()) { + updatedPath.append(sep); + updatedPath.append(entry.getKey()).append("=").append(encodeValue(entry.getValue())); + sep = "&"; + } + } + return updatedPath.toString(); + } + + private String encodeValue(String value) { + try { + return URLEncoder.encode(value, "UTF8"); + } catch (UnsupportedEncodingException e) { + return value; + } + } + + private void addMethod(Request.Builder requestBuilder, HttpMethod method, Map inputData) { + switch (method) { + case GET: + requestBuilder.get(); + break; + case POST: + requestBuilder.post(createRequestBody(inputData.get("body"))); + break; + case PUT: + requestBuilder.put(createRequestBody(inputData.get("body"))); + break; + case PATCH: + requestBuilder.patch(createRequestBody(inputData.get("body"))); + break; + case DELETE: + requestBuilder.delete(createRequestBody(inputData.get("body"))); + break; + } + } + + private RequestBody createRequestBody(Object body) { + Gson gson = new Gson(); + String jsonBody = body == null ? "{}" : gson.toJson(body); + return RequestBody.create(jsonBody, MediaType.parse("application/json")); + } + +} diff --git a/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/LoginTransport.java b/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/LoginTransport.java new file mode 100644 index 000000000..48e779228 --- /dev/null +++ b/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/LoginTransport.java @@ -0,0 +1,71 @@ +package com.google.looker.server.rtl; + +import com.google.gson.Gson; +import com.google.gson.reflect.TypeToken; +import com.google.looker.common.Constants; +import io.grpc.Status; +import java.io.IOException; +import java.lang.reflect.Type; +import java.security.KeyManagementException; +import java.security.NoSuchAlgorithmException; +import java.util.Map; +import okhttp3.FormBody; +import okhttp3.OkHttpClient; +import okhttp3.Request; +import okhttp3.Response; +import okhttp3.ResponseBody; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class LoginTransport implements Transport { + + final private static Logger LOGGER = LoggerFactory.getLogger(LoginTransport.class); + + public LookerClientResponse request( + String apiVersion, + HttpMethod method, + String path, + String inputJson) { + LOGGER.debug(inputJson); + Gson gson = new Gson(); + Type inputDataMapType = new TypeToken>() {}.getType(); + Map inputData = gson.fromJson(inputJson, inputDataMapType); + FormBody.Builder builder = new FormBody.Builder(); + inputData.forEach((k, v) -> builder.add(k, (String) v)); + String fullPath = String.format("%s/api/%s%s", System.getProperty(Constants.LOOKER_BASE_URL), apiVersion, path); + LOGGER.debug("fullpath=" + fullPath); + Request request = new Request.Builder() + .url(fullPath) + .addHeader("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8") + .addHeader("x-looker-appid", "Looker GRPC Proxy Server") + .post(builder.build()) + .build(); + try { + Response response = getHttpClient().newCall(request).execute(); + int statusCode = 
response.code(); + if (statusCode > 199 && statusCode < 300) { + ResponseBody responseBody = response.body(); + if (responseBody == null) { + LOGGER.error("login response has no body"); + return new LookerClientResponse(Status.NOT_FOUND); + } else { + String loginResponse = String.format("{\"result\":%s}", responseBody.string()); + LOGGER.debug("login request succeeded " + loginResponse); + return new LookerClientResponse(statusCode, loginResponse); + } + } else { + LOGGER.debug("login request failed: " + statusCode); + return new LookerClientResponse(statusCode); + } + } catch (IOException | KeyManagementException | NoSuchAlgorithmException e) { + LOGGER.error("login request failed", e); + return new LookerClientResponse(Status.INTERNAL); + } + } + + @Override + public OkHttpClient getHttpClient() throws KeyManagementException, NoSuchAlgorithmException { + return TransportFactory.instance().getDefaultTransport().getHttpClient(); + } + +} diff --git a/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/LogoutTransport.java b/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/LogoutTransport.java new file mode 100644 index 000000000..20146c81b --- /dev/null +++ b/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/LogoutTransport.java @@ -0,0 +1,71 @@ +package com.google.looker.server.rtl; + +import com.google.looker.common.Constants; +import io.grpc.Status; +import java.io.IOException; +import java.security.KeyManagementException; +import java.security.NoSuchAlgorithmException; +import okhttp3.OkHttpClient; +import okhttp3.Request; +import okhttp3.Response; +import okhttp3.ResponseBody; +import org.apache.commons.lang3.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class LogoutTransport implements Transport { + + final private static Logger LOGGER = LoggerFactory.getLogger(LogoutTransport.class); + + public LookerClientResponse request( + String apiVersion, + HttpMethod method, + String path, + String inputJson) { + String accessToken = Constants.CLIENT_ID_CONTEXT_KEY.get(); + if (accessToken == null || StringUtils.isBlank(accessToken)) { + LOGGER.debug("logout request ignored because no access token"); + // But we dont care + return new LookerClientResponse(200); + } else { + LOGGER.debug(inputJson); + String fullPath = String.format("%s/api/%s%s", System.getProperty(Constants.LOOKER_BASE_URL), apiVersion, path); + LOGGER.debug("fullpath=" + fullPath); + Request request = new Request.Builder() + .addHeader("Content-Type", "application/json") + .addHeader("Authorization", "Bearer " + accessToken) + .addHeader("x-looker-appid", "Looker GRPC Proxy Server") + .url(fullPath) + .delete() + .build(); + try { + Response response = getHttpClient().newCall(request).execute(); + int statusCode = response.code(); + if (statusCode > 199 && statusCode < 300) { + ResponseBody responseBody = response.body(); + LOGGER.debug("logout request succeeded"); + if (responseBody == null) { + return new LookerClientResponse(statusCode); + } else { + String logoutResponse = String.format("{\"result\":\"%s\"}", responseBody.string()); + return new LookerClientResponse(statusCode, logoutResponse); + } + } else { + LOGGER.debug("logout request failed: " + statusCode); + // But we dont care + return new LookerClientResponse(200); + } + } catch (IOException | KeyManagementException | NoSuchAlgorithmException e) { + LOGGER.error("logout request failed", e); + return new LookerClientResponse(Status.INTERNAL); + } + + } + } + + @Override + public OkHttpClient 
getHttpClient() throws KeyManagementException, NoSuchAlgorithmException { + return TransportFactory.instance().getDefaultTransport().getHttpClient(); + } + +} diff --git a/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/LookerClient.java b/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/LookerClient.java new file mode 100644 index 000000000..19d19a567 --- /dev/null +++ b/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/LookerClient.java @@ -0,0 +1,30 @@ +package com.google.looker.server.rtl; + +public class LookerClient { + final private String apiVersion; + + public LookerClient(String apiVersion) { + this.apiVersion = apiVersion; + } + + public LookerClientResponse get(String path, String inputJson) { + return TransportFactory.instance().getTransport(path).request(apiVersion, Transport.HttpMethod.GET, path, inputJson); + } + + public LookerClientResponse post(String path, String inputJson) { + return TransportFactory.instance().getTransport(path).request(apiVersion, Transport.HttpMethod.POST, path, inputJson); + } + + public LookerClientResponse put(String path, String inputJson) { + return TransportFactory.instance().getTransport(path).request(apiVersion, Transport.HttpMethod.PUT, path, inputJson); + } + + public LookerClientResponse patch(String path, String inputJson) { + return TransportFactory.instance().getTransport(path).request(apiVersion, Transport.HttpMethod.PATCH, path, inputJson); + } + + public LookerClientResponse delete(String path, String inputJson) { + return TransportFactory.instance().getTransport(path).request(apiVersion, Transport.HttpMethod.DELETE, path, inputJson); + } + +} diff --git a/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/LookerClientResponse.java b/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/LookerClientResponse.java new file mode 100644 index 000000000..237e4ef42 --- /dev/null +++ b/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/LookerClientResponse.java @@ -0,0 +1,43 @@ +package com.google.looker.server.rtl; + +import io.grpc.Status; + +public class LookerClientResponse { + + private Status status; + private boolean success; + private String jsonResponse; + + public LookerClientResponse(Status status) { + this.status = status; + } + + public LookerClientResponse(int statusCode,String jsonResponse) { + generateStatus(statusCode); + this.jsonResponse = jsonResponse; + } + + public LookerClientResponse(int statusCode) { + generateStatus(statusCode); + } + + public Status getStatus() { + return status; + } + + public String getJsonResponse() { + return jsonResponse; + } + + private void generateStatus(int statusCode) { + if (statusCode < 200) { + status = Status.INTERNAL; + } else if (statusCode > 299) { + if (statusCode == 404 || statusCode == 401 || statusCode == 403) { + status = Status.NOT_FOUND; + } else { + status = Status.INTERNAL; + } + } + } +} diff --git a/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/Transport.java b/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/Transport.java new file mode 100644 index 000000000..f24a5ed00 --- /dev/null +++ b/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/Transport.java @@ -0,0 +1,22 @@ +package com.google.looker.server.rtl; + +import java.security.KeyManagementException; +import java.security.NoSuchAlgorithmException; +import okhttp3.OkHttpClient; + +public interface Transport { + enum HttpMethod { + GET, + POST, + PUT, + PATCH, + DELETE + } + LookerClientResponse request( + String apiVersion, 
+ HttpMethod method, + String path, + String inputJson); + + OkHttpClient getHttpClient() throws NoSuchAlgorithmException, KeyManagementException; +} diff --git a/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/TransportFactory.java b/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/TransportFactory.java new file mode 100644 index 000000000..eefd75aed --- /dev/null +++ b/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/TransportFactory.java @@ -0,0 +1,35 @@ +package com.google.looker.server.rtl; + +public class TransportFactory { + + private static TransportFactory instance = new TransportFactory(); + + public static TransportFactory instance() { + return instance; + } + + final private Transport defaultTransport; + final private Transport loginTransport; + final private Transport logoutTransport; + + private TransportFactory() { + defaultTransport = new DefaultTransport(); + loginTransport = new LoginTransport(); + logoutTransport = new LogoutTransport(); + } + + public Transport getDefaultTransport() { + return defaultTransport; + } + + public Transport getTransport(String path) { + if (path.startsWith("/login")) { + return loginTransport; + } else if (path.startsWith("/logout")) { + return logoutTransport; + } else { + return defaultTransport; + } + } + +} diff --git a/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/auth/AuthorizationInterceptor.java b/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/auth/AuthorizationInterceptor.java new file mode 100644 index 000000000..fcd975a71 --- /dev/null +++ b/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/auth/AuthorizationInterceptor.java @@ -0,0 +1,58 @@ +package com.google.looker.server.rtl.auth; + +import com.google.looker.common.Constants; +import io.grpc.Context; +import io.grpc.Contexts; +import io.grpc.Metadata; +import io.grpc.ServerCall; +import io.grpc.ServerCallHandler; +import io.grpc.ServerInterceptor; +import io.grpc.Status; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class AuthorizationInterceptor implements ServerInterceptor { + + final private static Logger LOGGER = LoggerFactory.getLogger(AuthorizationInterceptor.class); + + private final Set unsecuredMethods + = new HashSet<>(Arrays.asList("Login", "LoginUser", "Ping")); + + public AuthorizationInterceptor() { + } + + @Override + public ServerCall.Listener interceptCall(ServerCall serverCall, Metadata metadata, ServerCallHandler serverCallHandler) { + String value = metadata.get(Constants.AUTHORIZATION_METADATA_KEY); + LOGGER.debug("AUTHORIZATION_METADATA_KEY=" + value); + LOGGER.info(String.format("Method called is %s", serverCall.getMethodDescriptor().getBareMethodName())); + Status status; + if (value == null) { + if (unsecuredMethods.contains(serverCall.getMethodDescriptor().getBareMethodName())) { + Context ctx = Context.current(); + return Contexts.interceptCall(ctx, serverCall, metadata, serverCallHandler); + } else { + status = Status.UNAUTHENTICATED.withDescription("Authorization token is missing"); + } + } else if (!value.startsWith(Constants.BEARER_TYPE)) { + status = Status.UNAUTHENTICATED.withDescription("Unknown authorization type"); + } else { + try { + String token = value.substring(Constants.BEARER_TYPE.length()).trim(); + LOGGER.debug("bearer token=" + token); + Context ctx = Context.current().withValue(Constants.CLIENT_ID_CONTEXT_KEY, token); + return Contexts.interceptCall(ctx, 
serverCall, metadata, serverCallHandler); + } catch (Exception e) { + status = Status.UNAUTHENTICATED.withDescription(e.getMessage()).withCause(e); + } + } + + serverCall.close(status, metadata); + return new ServerCall.Listener() { + // noop + }; + } +} diff --git a/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/ping/PingServiceImpl.java b/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/ping/PingServiceImpl.java new file mode 100644 index 000000000..9257da0b5 --- /dev/null +++ b/proto/grpc_proxy/src/main/java/com/google/looker/server/rtl/ping/PingServiceImpl.java @@ -0,0 +1,15 @@ +package com.google.looker.server.rtl.ping; + +import com.google.looker.grpc.services.PingServiceGrpc.PingServiceImplBase; +import com.google.looker.server.rtl.PingRequest; +import com.google.looker.server.rtl.PingResponse; +import io.grpc.stub.StreamObserver; + +public class PingServiceImpl extends PingServiceImplBase { + + @Override + public void ping(PingRequest request, StreamObserver responseObserver) { + responseObserver.onNext(PingResponse.newBuilder().setActive(true).build()); + responseObserver.onCompleted(); + } +} diff --git a/proto/grpc_proxy/src/main/java/com/google/looker/server/sdk/LookerServiceImpl.java b/proto/grpc_proxy/src/main/java/com/google/looker/server/sdk/LookerServiceImpl.java new file mode 100644 index 000000000..fc04b4de4 --- /dev/null +++ b/proto/grpc_proxy/src/main/java/com/google/looker/server/sdk/LookerServiceImpl.java @@ -0,0 +1,14550 @@ +// MIT License +// +// Copyright (c) 2019 Looker Data Sciences, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
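/*
 * A caller-side sketch of talking to this proxy. Assumptions not shown in this diff: the proxy
 * listens on localhost:50051, Constants.AUTHORIZATION_METADATA_KEY is the standard lower-case
 * "authorization" header, Constants.BEARER_TYPE is "Bearer", and the generated message accessors
 * mirror the snake_case proto field names.
 *
 * ```java
 * import com.google.looker.grpc.services.*;   // stubs and messages generated from this PR's protos
 * import io.grpc.ManagedChannel;
 * import io.grpc.ManagedChannelBuilder;
 * import io.grpc.Metadata;
 * import io.grpc.stub.MetadataUtils;
 *
 * public class ProxyClientSketch {
 *   public static void main(String[] args) {
 *     // TLS channel; the server is built with useTransportSecurity(certChain, privateKey), so a
 *     // real client may also need to trust the proxy's certificate.
 *     ManagedChannel channel = ManagedChannelBuilder
 *         .forAddress("localhost", 50051)
 *         .useTransportSecurity()
 *         .build();
 *
 *     // Login is on AuthorizationInterceptor's unsecured list, so no token is needed yet.
 *     LookerServiceGrpc.LookerServiceBlockingStub looker = LookerServiceGrpc.newBlockingStub(channel);
 *     LoginResponse login = looker.login(LoginRequest.newBuilder()
 *         .setClientId("...")                              // assumed accessors for the
 *         .setClientSecret("...")                          // client_id / client_secret fields
 *         .build());
 *     String token = login.getResult().getAccessToken();   // assumed shape of the {"result": ...} wrapper
 *
 *     // Every other RPC must carry "Authorization: Bearer <token>" metadata, which the
 *     // interceptor copies into Constants.CLIENT_ID_CONTEXT_KEY for the transports.
 *     Metadata md = new Metadata();
 *     md.put(Metadata.Key.of("authorization", Metadata.ASCII_STRING_MARSHALLER), "Bearer " + token);
 *     LookerServiceGrpc.LookerServiceBlockingStub authed =
 *         looker.withInterceptors(MetadataUtils.newAttachHeadersInterceptor(md));
 *     // `authed` can now invoke any of the secured RPCs implemented below.
 *     channel.shutdown();
 *   }
 * }
 * ```
 */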
+ +// 401 API methods + + +package com.google.looker.server.sdk; + +import com.google.looker.grpc.services.*; +import com.google.looker.grpc.services.LookerServiceGrpc.LookerServiceImplBase; +import com.google.looker.server.rtl.LookerClient; +import com.google.looker.server.rtl.LookerClientResponse; +import com.google.protobuf.InvalidProtocolBufferException; +import com.google.protobuf.util.JsonFormat; +import io.grpc.Status; +import io.grpc.stub.StreamObserver; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class LookerServiceImpl extends LookerServiceImplBase { + + final private static Logger LOGGER = LoggerFactory.getLogger(LookerServiceImpl.class); + + final private LookerClient lookerClient; + + public LookerServiceImpl() { + lookerClient = new LookerClient("4.0"); + } + + + //#region ApiAuth: API Authentication + + /** + * ### Present client credentials to obtain an authorization token + * + * Looker API implements the OAuth2 [Resource Owner Password Credentials Grant](https://looker.com/docs/r/api/outh2_resource_owner_pc) pattern. + * The client credentials required for this login must be obtained by creating an API3 key on a user account + * in the Looker Admin console. The API3 key consists of a public `client_id` and a private `client_secret`. + * + * The access token returned by `login` must be used in the HTTP Authorization header of subsequent + * API requests, like this: + * ``` + * Authorization: token 4QDkCyCtZzYgj4C2p2cj3csJH7zqS5RzKs2kTnG4 + * ``` + * Replace "4QDkCy..." with the `access_token` value returned by `login`. + * The word `token` is a string literal and must be included exactly as shown. + * + * This function can accept `client_id` and `client_secret` parameters as URL query params or as www-form-urlencoded params in the body of the HTTP request. Since there is a small risk that URL parameters may be visible to intermediate nodes on the network route (proxies, routers, etc), passing credentials in the body of the request is considered more secure than URL params. + * + * Example of passing credentials in the HTTP request body: + * ```` + * POST HTTP /login + * Content-Type: application/x-www-form-urlencoded + * + * client_id=CGc9B7v7J48dQSJvxxx&client_secret=nNVS9cSS3xNpSC9JdsBvvvvv + * ```` + * + * ### Best Practice: + * Always pass credentials in body params. Pass credentials in URL query params **only** when you cannot pass body params due to application, tool, or other limitations. + * + * For more information and detailed examples of Looker API authorization, see [How to Authenticate to Looker API3](https://github.com/looker/looker-sdk-ruby/blob/master/authentication.md). 
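 *
 * The implementation below bridges this REST call and gRPC with protobuf's JsonFormat: the
 * request message is printed to JSON with its original snake_case field names, and the
 * transport's {"result": ...} payload is merged back into the response builder while ignoring
 * any fields the proto does not model. A minimal, self-contained sketch of that round trip,
 * using the well-known SourceContext message in place of the generated Looker messages:
 *
 * ```java
 * import com.google.protobuf.SourceContext;
 * import com.google.protobuf.util.JsonFormat;
 *
 * public class JsonBridgeSketch {
 *   public static void main(String[] args) throws Exception {
 *     // Request side: keep the proto field name "file_name" instead of the default "fileName".
 *     SourceContext req = SourceContext.newBuilder().setFileName("example.proto").build();
 *     String inputJson = JsonFormat.printer().preservingProtoFieldNames().print(req);
 *
 *     // Response side: merge JSON that may contain keys the message does not declare.
 *     SourceContext.Builder builder = SourceContext.newBuilder();
 *     JsonFormat.parser().ignoringUnknownFields()
 *         .merge("{\"file_name\": \"other.proto\", \"not_in_the_proto\": true}", builder);
 *     System.out.println(inputJson + " -> " + builder.getFileName());
 *   }
 * }
 * ```
 * The same printer/parser pair appears verbatim in every method of this class.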
+ * + */ + @Override + public void login(LoginRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/login", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + LoginResponse.Builder responseBuilder = LoginResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create an access token that runs as a given user. + * + * This can only be called by an authenticated admin user. It allows that admin to generate a new + * authentication token for the user with the given user id. That token can then be used for subsequent + * API calls - which are then performed *as* that target user. + * + * The target user does *not* need to have a pre-existing API client_id/client_secret pair. And, no such + * credentials are created by this call. + * + * This allows for building systems where api user authentication for an arbitrary number of users is done + * outside of Looker and funneled through a single 'service account' with admin permissions. Note that a + * new access token is generated on each call. If target users are going to be making numerous API + * calls in a short period then it is wise to cache this authentication token rather than call this before + * each of those API calls. + * + * See 'login' for more detail on the access token and how to use it. + * + */ + @Override + public void loginUser(LoginUserRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/login/{user_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + LoginUserResponse.Builder responseBuilder = LoginUserResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Logout of the API and invalidate the current access token. 
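 *
 * On failure this RPC, like every method in this class, surfaces a gRPC status rather than a
 * partial message: AuthorizationInterceptor rejects missing or unrecognized tokens with
 * UNAUTHENTICATED, LookerClientResponse.generateStatus maps REST errors to NOT_FOUND or
 * INTERNAL, and a JsonFormat failure becomes INVALID_ARGUMENT. A caller-side sketch, assuming
 * a blocking stub `authedStub` that carries the Authorization metadata and a LogoutRequest
 * with no required fields:
 * ```java
 * try {
 *   authedStub.logout(LogoutRequest.newBuilder().build());
 * } catch (io.grpc.StatusRuntimeException e) {
 *   System.err.println("logout failed: " + e.getStatus().getCode());
 * }
 * ```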
+ * + */ + @Override + public void logout(LogoutRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/logout", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + LogoutResponse.Builder responseBuilder = LogoutResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion ApiAuth: API Authentication + + //#region Auth: Manage User Authentication Configuration + + /** + * ### Create SSO Embed URL + * + * Creates an SSO embed URL and cryptographically signs it with an embed secret. + * This signed URL can then be used to instantiate a Looker embed session in a PBL web application. + * Do not make any modifications to this URL - any change may invalidate the signature and + * cause the URL to fail to load a Looker embed session. + * + * A signed SSO embed URL can only be used once. After it has been used to request a page from the + * Looker server, the URL is invalid. Future requests using the same URL will fail. This is to prevent + * 'replay attacks'. + * + * The `target_url` property must be a complete URL of a Looker UI page - scheme, hostname, path and query params. + * To load a dashboard with id 56 and with a filter of `Date=1 years`, the looker URL would look like `https:/myname.looker.com/dashboards/56?Date=1%20years`. + * The best way to obtain this target_url is to navigate to the desired Looker page in your web browser, + * copy the URL shown in the browser address bar and paste it into the `target_url` property as a quoted string value in this API request. + * + * Permissions for the embed user are defined by the groups in which the embed user is a member (group_ids property) + * and the lists of models and permissions assigned to the embed user. + * At a minimum, you must provide values for either the group_ids property, or both the models and permissions properties. + * These properties are additive; an embed user can be a member of certain groups AND be granted access to models and permissions. + * + * The embed user's access is the union of permissions granted by the group_ids, models, and permissions properties. + * + * This function does not strictly require all group_ids, user attribute names, or model names to exist at the moment the + * SSO embed url is created. Unknown group_id, user attribute names or model names will be passed through to the output URL. + * To diagnose potential problems with an SSO embed URL, you can copy the signed URL into the Embed URI Validator text box in `/admin/embed`. + * + * The `secret_id` parameter is optional. If specified, its value must be the id of an active secret defined in the Looker instance. + * if not specified, the URL will be signed using the newest active secret defined in the Looker instance. 
+ * + * #### Security Note + * Protect this signed URL as you would an access token or password credentials - do not write + * it to disk, do not pass it to a third party, and only pass it through a secure HTTPS + * encrypted transport. + * + */ + @Override + public void createSsoEmbedUrl(CreateSsoEmbedUrlRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/embed/sso_url", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateSsoEmbedUrlResponse.Builder responseBuilder = CreateSsoEmbedUrlResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create an Embed URL + * + * Creates an embed URL that runs as the Looker user making this API call. ("Embed as me") + * This embed URL can then be used to instantiate a Looker embed session in a + * "Powered by Looker" (PBL) web application. + * + * This is similar to Private Embedding (https://docs.looker.com/r/admin/embed/private-embed). Instead of + * of logging into the Web UI to authenticate, the user has already authenticated against the API to be able to + * make this call. However, unlike Private Embed where the user has access to any other part of the Looker UI, + * the embed web session created by requesting the EmbedUrlResponse.url in a browser only has access to + * content visible under the `/embed` context. + * + * An embed URL can only be used once, and must be used within 5 minutes of being created. After it + * has been used to request a page from the Looker server, the URL is invalid. Future requests using + * the same URL will fail. This is to prevent 'replay attacks'. + * + * The `target_url` property must be a complete URL of a Looker Embedded UI page - scheme, hostname, path starting with "/embed" and query params. + * To load a dashboard with id 56 and with a filter of `Date=1 years`, the looker Embed URL would look like `https://myname.looker.com/embed/dashboards/56?Date=1%20years`. + * The best way to obtain this target_url is to navigate to the desired Looker page in your web browser, + * copy the URL shown in the browser address bar, insert "/embed" after the host/port, and paste it into the `target_url` property as a quoted string value in this API request. + * + * #### Security Note + * Protect this embed URL as you would an access token or password credentials - do not write + * it to disk, do not pass it to a third party, and only pass it through a secure HTTPS + * encrypted transport. 
+ * + */ + @Override + public void createEmbedUrlAsMe(CreateEmbedUrlAsMeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/embed/token_url/me", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateEmbedUrlAsMeResponse.Builder responseBuilder = CreateEmbedUrlAsMeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the LDAP configuration. + * + * Looker can be optionally configured to authenticate users against an Active Directory or other LDAP directory server. + * LDAP setup requires coordination with an administrator of that directory server. + * + * Only Looker administrators can read and update the LDAP configuration. + * + * Configuring LDAP impacts authentication for all users. This configuration should be done carefully. + * + * Looker maintains a single LDAP configuration. It can be read and updated. Updates only succeed if the new state will be valid (in the sense that all required fields are populated); it is up to you to ensure that the configuration is appropriate and correct). + * + * LDAP is enabled or disabled for Looker using the **enabled** field. + * + * Looker will never return an **auth_password** field. That value can be set, but never retrieved. + * + * See the [Looker LDAP docs](https://www.looker.com/docs/r/api/ldap_setup) for additional information. + * + */ + @Override + public void ldapConfig(LdapConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/ldap_config", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + LdapConfigResponse.Builder responseBuilder = LdapConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the LDAP configuration. + * + * Configuring LDAP impacts authentication for all users. This configuration should be done carefully. + * + * Only Looker administrators can read and update the LDAP configuration. + * + * LDAP is enabled or disabled for Looker using the **enabled** field. + * + * It is **highly** recommended that any LDAP setting changes be tested using the APIs below before being set globally. + * + * See the [Looker LDAP docs](https://www.looker.com/docs/r/api/ldap_setup) for additional information. 
+ * + */ + @Override + public void updateLdapConfig(UpdateLdapConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/ldap_config", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateLdapConfigResponse.Builder responseBuilder = UpdateLdapConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Test the connection settings for an LDAP configuration. + * + * This tests that the connection is possible given a connection_host and connection_port. + * + * **connection_host** and **connection_port** are required. **connection_tls** is optional. + * + * Example: + * ```json + * { + * "connection_host": "ldap.example.com", + * "connection_port": "636", + * "connection_tls": true + * } + * ``` + * + * No authentication to the LDAP server is attempted. + * + * The active LDAP settings are not modified. + * + */ + @Override + public void testLdapConfigConnection(TestLdapConfigConnectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/ldap_config/test_connection", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + TestLdapConfigConnectionResponse.Builder responseBuilder = TestLdapConfigConnectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Test the connection authentication settings for an LDAP configuration. + * + * This tests that the connection is possible and that a 'server' account to be used by Looker can authenticate to the LDAP server given connection and authentication information. + * + * **connection_host**, **connection_port**, and **auth_username**, are required. **connection_tls** and **auth_password** are optional. + * + * Example: + * ```json + * { + * "connection_host": "ldap.example.com", + * "connection_port": "636", + * "connection_tls": true, + * "auth_username": "cn=looker,dc=example,dc=com", + * "auth_password": "secret" + * } + * ``` + * + * Looker will never return an **auth_password**. If this request omits the **auth_password** field, then the **auth_password** value from the active config (if present) will be used for the test. + * + * The active LDAP settings are not modified. 
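 *
 * A sketch of the same test issued through this proxy, via a blocking stub that carries the
 * Authorization metadata. The generated request message is not part of this diff, so the
 * accessors below are assumed to mirror the JSON fields above; if the generator nests them under
 * a `body` message instead (as DefaultTransport's special handling of the "body" key suggests it
 * might), the same setters would hang off that nested builder.
 * ```java
 * TestLdapConfigAuthResponse res = authedStub.testLdapConfigAuth(
 *     TestLdapConfigAuthRequest.newBuilder()
 *         .setConnectionHost("ldap.example.com")
 *         .setConnectionPort("636")
 *         .setConnectionTls(true)
 *         .setAuthUsername("cn=looker,dc=example,dc=com")
 *         .setAuthPassword("secret")
 *         .build());
 * ```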
+ * + * + */ + @Override + public void testLdapConfigAuth(TestLdapConfigAuthRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/ldap_config/test_auth", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + TestLdapConfigAuthResponse.Builder responseBuilder = TestLdapConfigAuthResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Test the user authentication settings for an LDAP configuration without authenticating the user. + * + * This test will let you easily test the mapping for user properties and roles for any user without needing to authenticate as that user. + * + * This test accepts a full LDAP configuration along with a username and attempts to find the full info for the user from the LDAP server without actually authenticating the user. So, user password is not required.The configuration is validated before attempting to contact the server. + * + * **test_ldap_user** is required. + * + * The active LDAP settings are not modified. + * + * + */ + @Override + public void testLdapConfigUserInfo(TestLdapConfigUserInfoRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/ldap_config/test_user_info", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + TestLdapConfigUserInfoResponse.Builder responseBuilder = TestLdapConfigUserInfoResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Test the user authentication settings for an LDAP configuration. + * + * This test accepts a full LDAP configuration along with a username/password pair and attempts to authenticate the user with the LDAP server. The configuration is validated before attempting the authentication. + * + * Looker will never return an **auth_password**. If this request omits the **auth_password** field, then the **auth_password** value from the active config (if present) will be used for the test. + * + * **test_ldap_user** and **test_ldap_password** are required. + * + * The active LDAP settings are not modified. 
+ * + * + */ + @Override + public void testLdapConfigUserAuth(TestLdapConfigUserAuthRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/ldap_config/test_user_auth", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + TestLdapConfigUserAuthResponse.Builder responseBuilder = TestLdapConfigUserAuthResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### List All OAuth Client Apps + * + * Lists all applications registered to use OAuth2 login with this Looker instance, including + * enabled and disabled apps. + * + * Results are filtered to include only the apps that the caller (current user) + * has permission to see. + * + */ + @Override + public void allOauthClientApps(AllOauthClientAppsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/oauth_client_apps", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllOauthClientAppsResponse.Builder responseBuilder = AllOauthClientAppsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Oauth Client App + * + * Returns the registered app client with matching client_guid. 
+ * + */ + @Override + public void oauthClientApp(OauthClientAppRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/oauth_client_apps/{client_guid}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + OauthClientAppResponse.Builder responseBuilder = OauthClientAppResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Register an OAuth2 Client App + * + * Registers details identifying an external web app or native app as an OAuth2 login client of the Looker instance. + * The app registration must provide a unique client_guid and redirect_uri that the app will present + * in OAuth login requests. If the client_guid and redirect_uri parameters in the login request do not match + * the app details registered with the Looker instance, the request is assumed to be a forgery and is rejected. + * + */ + @Override + public void registerOauthClientApp(RegisterOauthClientAppRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/oauth_client_apps/{client_guid}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RegisterOauthClientAppResponse.Builder responseBuilder = RegisterOauthClientAppResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update OAuth2 Client App Details + * + * Modifies the details a previously registered OAuth2 login client app. 
+ * + */ + @Override + public void updateOauthClientApp(UpdateOauthClientAppRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/oauth_client_apps/{client_guid}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateOauthClientAppResponse.Builder responseBuilder = UpdateOauthClientAppResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete OAuth Client App + * + * Deletes the registration info of the app with the matching client_guid. + * All active sessions and tokens issued for this app will immediately become invalid. + * + * ### Note: this deletion cannot be undone. + * + */ + @Override + public void deleteOauthClientApp(DeleteOauthClientAppRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/oauth_client_apps/{client_guid}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteOauthClientAppResponse.Builder responseBuilder = DeleteOauthClientAppResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Invalidate All Issued Tokens + * + * Immediately invalidates all auth codes, sessions, access tokens and refresh tokens issued for + * this app for ALL USERS of this app. 
+ * + */ + @Override + public void invalidateTokens(InvalidateTokensRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/oauth_client_apps/{client_guid}/tokens", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + InvalidateTokensResponse.Builder responseBuilder = InvalidateTokensResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Activate an app for a user + * + * Activates a user for a given oauth client app. This indicates the user has been informed that + * the app will have access to the user's looker data, and that the user has accepted and allowed + * the app to use their Looker account. + * + * Activating a user for an app that the user is already activated with returns a success response. + * + */ + @Override + public void activateAppUser(ActivateAppUserRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/oauth_client_apps/{client_guid}/users/{user_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ActivateAppUserResponse.Builder responseBuilder = ActivateAppUserResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Deactivate an app for a user + * + * Deactivate a user for a given oauth client app. All tokens issued to the app for + * this user will be invalid immediately. Before the user can use the app with their + * Looker account, the user will have to read and accept an account use disclosure statement for the app. + * + * Admin users can deactivate other users, but non-admin users can only deactivate themselves. + * + * As with most REST DELETE operations, this endpoint does not return an error if the indicated + * resource (app or user) does not exist or has already been deactivated. 
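 *
 * As a sketch via a token-bearing blocking stub (the generated accessors for the client_guid and
 * user_id path parameters are assumed, as is a numeric user id):
 * ```java
 * authedStub.deactivateAppUser(DeactivateAppUserRequest.newBuilder()
 *     .setClientGuid("my_app_guid")
 *     .setUserId(42)
 *     .build());
 * ```
 * JsonFormat prints the request with its original field names, and DefaultTransport.updatePath
 * fills the {client_guid} and {user_id} templates, so the proxy issues an HTTP DELETE against
 * /api/4.0/oauth_client_apps/my_app_guid/users/<id> on the configured Looker host.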
+ * + */ + @Override + public void deactivateAppUser(DeactivateAppUserRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/oauth_client_apps/{client_guid}/users/{user_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeactivateAppUserResponse.Builder responseBuilder = DeactivateAppUserResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the OIDC configuration. + * + * Looker can be optionally configured to authenticate users against an OpenID Connect (OIDC) + * authentication server. OIDC setup requires coordination with an administrator of that server. + * + * Only Looker administrators can read and update the OIDC configuration. + * + * Configuring OIDC impacts authentication for all users. This configuration should be done carefully. + * + * Looker maintains a single OIDC configuation. It can be read and updated. Updates only succeed if the new state will be valid (in the sense that all required fields are populated); it is up to you to ensure that the configuration is appropriate and correct). + * + * OIDC is enabled or disabled for Looker using the **enabled** field. + * + */ + @Override + public void oidcConfig(OidcConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/oidc_config", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + OidcConfigResponse.Builder responseBuilder = OidcConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the OIDC configuration. + * + * Configuring OIDC impacts authentication for all users. This configuration should be done carefully. + * + * Only Looker administrators can read and update the OIDC configuration. + * + * OIDC is enabled or disabled for Looker using the **enabled** field. + * + * It is **highly** recommended that any OIDC setting changes be tested using the APIs below before being set globally. 
+ * + */ + @Override + public void updateOidcConfig(UpdateOidcConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/oidc_config", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateOidcConfigResponse.Builder responseBuilder = UpdateOidcConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get a OIDC test configuration by test_slug. + * + */ + @Override + public void oidcTestConfig(OidcTestConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/oidc_test_configs/{test_slug}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + OidcTestConfigResponse.Builder responseBuilder = OidcTestConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a OIDC test configuration. + * + */ + @Override + public void deleteOidcTestConfig(DeleteOidcTestConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/oidc_test_configs/{test_slug}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteOidcTestConfigResponse.Builder responseBuilder = DeleteOidcTestConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a OIDC test configuration. 
+ * + */ + @Override + public void createOidcTestConfig(CreateOidcTestConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/oidc_test_configs", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateOidcTestConfigResponse.Builder responseBuilder = CreateOidcTestConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get password config. + * + */ + @Override + public void passwordConfig(PasswordConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/password_config", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + PasswordConfigResponse.Builder responseBuilder = PasswordConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update password config. + * + */ + @Override + public void updatePasswordConfig(UpdatePasswordConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/password_config", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdatePasswordConfigResponse.Builder responseBuilder = UpdatePasswordConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Force all credentials_email users to reset their login passwords upon their next login. 
+ *
+ */
+ @Override
+ public void forcePasswordResetAtNextLoginForAllUsers(ForcePasswordResetAtNextLoginForAllUsersRequest request, StreamObserver responseObserver) {
+ try {
+ String inputJson = JsonFormat
+ .printer()
+ .preservingProtoFieldNames()
+ .print(request);
+ LookerClientResponse lookerResponse = lookerClient.put("/password_config/force_password_reset_at_next_login_for_all_users", inputJson);
+ Status lookerStatus = lookerResponse.getStatus();
+ if (lookerStatus != null) {
+ responseObserver.onError(lookerStatus.asRuntimeException());
+ } else {
+ ForcePasswordResetAtNextLoginForAllUsersResponse.Builder responseBuilder = ForcePasswordResetAtNextLoginForAllUsersResponse.newBuilder();
+ String outputJson = lookerResponse.getJsonResponse();
+ if (outputJson != null) {
+ JsonFormat
+ .parser()
+ .ignoringUnknownFields()
+ .merge(outputJson, responseBuilder);
+ }
+ responseObserver.onNext(responseBuilder.build());
+ responseObserver.onCompleted();
+ }
+ } catch (InvalidProtocolBufferException e) {
+ LOGGER.error("invalid protobuf data", e);
+ responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException());
+ }
+ }
+
+
+ /**
+ * ### Get the SAML configuration.
+ *
+ * Looker can be optionally configured to authenticate users against a SAML authentication server.
+ * SAML setup requires coordination with an administrator of that server.
+ *
+ * Only Looker administrators can read and update the SAML configuration.
+ *
+ * Configuring SAML impacts authentication for all users. This configuration should be done carefully.
+ *
+ * Looker maintains a single SAML configuration. It can be read and updated. Updates only succeed if the new state will be valid (in the sense that all required fields are populated); it is up to you to ensure that the configuration is appropriate and correct.
+ *
+ * SAML is enabled or disabled for Looker using the **enabled** field.
+ *
+ */
+ @Override
+ public void samlConfig(SamlConfigRequest request, StreamObserver responseObserver) {
+ try {
+ String inputJson = JsonFormat
+ .printer()
+ .preservingProtoFieldNames()
+ .print(request);
+ LookerClientResponse lookerResponse = lookerClient.get("/saml_config", inputJson);
+ Status lookerStatus = lookerResponse.getStatus();
+ if (lookerStatus != null) {
+ responseObserver.onError(lookerStatus.asRuntimeException());
+ } else {
+ SamlConfigResponse.Builder responseBuilder = SamlConfigResponse.newBuilder();
+ String outputJson = lookerResponse.getJsonResponse();
+ if (outputJson != null) {
+ JsonFormat
+ .parser()
+ .ignoringUnknownFields()
+ .merge(outputJson, responseBuilder);
+ }
+ responseObserver.onNext(responseBuilder.build());
+ responseObserver.onCompleted();
+ }
+ } catch (InvalidProtocolBufferException e) {
+ LOGGER.error("invalid protobuf data", e);
+ responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException());
+ }
+ }
+
+
+ /**
+ * ### Update the SAML configuration.
+ *
+ * Configuring SAML impacts authentication for all users. This configuration should be done carefully.
+ *
+ * Only Looker administrators can read and update the SAML configuration.
+ *
+ * SAML is enabled or disabled for Looker using the **enabled** field.
+ *
+ * It is **highly** recommended that any SAML setting changes be tested using the APIs below before being set globally.
+ * + */ + @Override + public void updateSamlConfig(UpdateSamlConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/saml_config", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateSamlConfigResponse.Builder responseBuilder = UpdateSamlConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get a SAML test configuration by test_slug. + * + */ + @Override + public void samlTestConfig(SamlTestConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/saml_test_configs/{test_slug}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SamlTestConfigResponse.Builder responseBuilder = SamlTestConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a SAML test configuration. + * + */ + @Override + public void deleteSamlTestConfig(DeleteSamlTestConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/saml_test_configs/{test_slug}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteSamlTestConfigResponse.Builder responseBuilder = DeleteSamlTestConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a SAML test configuration. 
+ * + */ + @Override + public void createSamlTestConfig(CreateSamlTestConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/saml_test_configs", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateSamlTestConfigResponse.Builder responseBuilder = CreateSamlTestConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Parse the given xml as a SAML IdP metadata document and return the result. + * + */ + @Override + public void parseSamlIdpMetadata(ParseSamlIdpMetadataRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/parse_saml_idp_metadata", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ParseSamlIdpMetadataResponse.Builder responseBuilder = ParseSamlIdpMetadataResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Fetch the given url and parse it as a SAML IdP metadata document and return the result. + * Note that this requires that the url be public or at least at a location where the Looker instance + * can fetch it without requiring any special authentication. + * + */ + @Override + public void fetchAndParseSamlIdpMetadata(FetchAndParseSamlIdpMetadataRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/fetch_and_parse_saml_idp_metadata", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FetchAndParseSamlIdpMetadataResponse.Builder responseBuilder = FetchAndParseSamlIdpMetadataResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get session config. 
+ * + */ + @Override + public void sessionConfig(SessionConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/session_config", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SessionConfigResponse.Builder responseBuilder = SessionConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update session config. + * + */ + @Override + public void updateSessionConfig(UpdateSessionConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/session_config", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateSessionConfigResponse.Builder responseBuilder = UpdateSessionConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get currently locked-out users. + * + */ + @Override + public void allUserLoginLockouts(AllUserLoginLockoutsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/user_login_lockouts", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllUserLoginLockoutsResponse.Builder responseBuilder = AllUserLoginLockoutsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search currently locked-out users. 
+ * + */ + @Override + public void searchUserLoginLockouts(SearchUserLoginLockoutsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/user_login_lockouts/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchUserLoginLockoutsResponse.Builder responseBuilder = SearchUserLoginLockoutsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Removes login lockout for the associated user. + * + */ + @Override + public void deleteUserLoginLockout(DeleteUserLoginLockoutRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/user_login_lockout/{key}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserLoginLockoutResponse.Builder responseBuilder = DeleteUserLoginLockoutResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Auth: Manage User Authentication Configuration + + //#region Board: Manage Boards + + /** + * ### Get information about all boards. + * + */ + @Override + public void allBoards(AllBoardsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/boards", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllBoardsResponse.Builder responseBuilder = AllBoardsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a new board. 
+ * + */ + @Override + public void createBoard(CreateBoardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/boards", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateBoardResponse.Builder responseBuilder = CreateBoardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search Boards + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. + * + * + */ + @Override + public void searchBoards(SearchBoardsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/boards/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchBoardsResponse.Builder responseBuilder = SearchBoardsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a board. 
+ * + */ + @Override + public void board(BoardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/boards/{board_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + BoardResponse.Builder responseBuilder = BoardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a board definition. + * + */ + @Override + public void updateBoard(UpdateBoardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/boards/{board_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateBoardResponse.Builder responseBuilder = UpdateBoardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a board. + * + */ + @Override + public void deleteBoard(DeleteBoardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/boards/{board_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteBoardResponse.Builder responseBuilder = DeleteBoardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all board items. 
+ * + */ + @Override + public void allBoardItems(AllBoardItemsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/board_items", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllBoardItemsResponse.Builder responseBuilder = AllBoardItemsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a new board item. + * + */ + @Override + public void createBoardItem(CreateBoardItemRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/board_items", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateBoardItemResponse.Builder responseBuilder = CreateBoardItemResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a board item. + * + */ + @Override + public void boardItem(BoardItemRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/board_items/{board_item_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + BoardItemResponse.Builder responseBuilder = BoardItemResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a board item definition. 
+ * + */ + @Override + public void updateBoardItem(UpdateBoardItemRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/board_items/{board_item_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateBoardItemResponse.Builder responseBuilder = UpdateBoardItemResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a board item. + * + */ + @Override + public void deleteBoardItem(DeleteBoardItemRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/board_items/{board_item_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteBoardItemResponse.Builder responseBuilder = DeleteBoardItemResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all board sections. + * + */ + @Override + public void allBoardSections(AllBoardSectionsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/board_sections", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllBoardSectionsResponse.Builder responseBuilder = AllBoardSectionsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a new board section. 
+ * + */ + @Override + public void createBoardSection(CreateBoardSectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/board_sections", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateBoardSectionResponse.Builder responseBuilder = CreateBoardSectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a board section. + * + */ + @Override + public void boardSection(BoardSectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/board_sections/{board_section_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + BoardSectionResponse.Builder responseBuilder = BoardSectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a board section definition. + * + */ + @Override + public void updateBoardSection(UpdateBoardSectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/board_sections/{board_section_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateBoardSectionResponse.Builder responseBuilder = UpdateBoardSectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a board section. 
+ * + */ + @Override + public void deleteBoardSection(DeleteBoardSectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/board_sections/{board_section_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteBoardSectionResponse.Builder responseBuilder = DeleteBoardSectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Board: Manage Boards + + //#region ColorCollection: Manage Color Collections + + /** + * ### Get an array of all existing Color Collections + * Get a **single** color collection by id with [ColorCollection](#!/ColorCollection/color_collection) + * + * Get all **standard** color collections with [ColorCollection](#!/ColorCollection/color_collections_standard) + * + * Get all **custom** color collections with [ColorCollection](#!/ColorCollection/color_collections_custom) + * + * **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + * + * + */ + @Override + public void allColorCollections(AllColorCollectionsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/color_collections", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllColorCollectionsResponse.Builder responseBuilder = AllColorCollectionsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a custom color collection with the specified information + * + * Creates a new custom color collection object, returning the details, including the created id. + * + * **Update** an existing color collection with [Update Color Collection](#!/ColorCollection/update_color_collection) + * + * **Permanently delete** an existing custom color collection with [Delete Color Collection](#!/ColorCollection/delete_color_collection) + * + * **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. 
+ * + * + */ + @Override + public void createColorCollection(CreateColorCollectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/color_collections", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateColorCollectionResponse.Builder responseBuilder = CreateColorCollectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get an array of all existing **Custom** Color Collections + * Get a **single** color collection by id with [ColorCollection](#!/ColorCollection/color_collection) + * + * Get all **standard** color collections with [ColorCollection](#!/ColorCollection/color_collections_standard) + * + * **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + * + * + */ + @Override + public void colorCollectionsCustom(ColorCollectionsCustomRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/color_collections/custom", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ColorCollectionsCustomResponse.Builder responseBuilder = ColorCollectionsCustomResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get an array of all existing **Standard** Color Collections + * Get a **single** color collection by id with [ColorCollection](#!/ColorCollection/color_collection) + * + * Get all **custom** color collections with [ColorCollection](#!/ColorCollection/color_collections_custom) + * + * **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. 
+ * + * + */ + @Override + public void colorCollectionsStandard(ColorCollectionsStandardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/color_collections/standard", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ColorCollectionsStandardResponse.Builder responseBuilder = ColorCollectionsStandardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the default color collection + * + * Use this to retrieve the default Color Collection. + * + * Set the default color collection with [ColorCollection](#!/ColorCollection/set_default_color_collection) + * + */ + @Override + public void defaultColorCollection(DefaultColorCollectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/color_collections/default", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DefaultColorCollectionResponse.Builder responseBuilder = DefaultColorCollectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Set the global default Color Collection by ID + * + * Returns the new specified default Color Collection object. + * **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. 
+ * + * + */ + @Override + public void setDefaultColorCollection(SetDefaultColorCollectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/color_collections/default", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SetDefaultColorCollectionResponse.Builder responseBuilder = SetDefaultColorCollectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get a Color Collection by ID + * + * Use this to retrieve a specific Color Collection. + * Get a **single** color collection by id with [ColorCollection](#!/ColorCollection/color_collection) + * + * Get all **standard** color collections with [ColorCollection](#!/ColorCollection/color_collections_standard) + * + * Get all **custom** color collections with [ColorCollection](#!/ColorCollection/color_collections_custom) + * + * **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + * + * + */ + @Override + public void colorCollection(ColorCollectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/color_collections/{collection_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ColorCollectionResponse.Builder responseBuilder = ColorCollectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a custom color collection by id. + * **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. 
+ * + * + */ + @Override + public void updateColorCollection(UpdateColorCollectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/color_collections/{collection_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateColorCollectionResponse.Builder responseBuilder = UpdateColorCollectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a custom color collection by id + * + * This operation permanently deletes the identified **Custom** color collection. + * + * **Standard** color collections cannot be deleted + * + * Because multiple color collections can have the same label, they must be deleted by ID, not name. + * **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + * + * + */ + @Override + public void deleteColorCollection(DeleteColorCollectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/color_collections/{collection_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteColorCollectionResponse.Builder responseBuilder = DeleteColorCollectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion ColorCollection: Manage Color Collections + + //#region Command: Manage Commands + + /** + * ### Get All Commands. 
+ * + */ + @Override + public void getAllCommands(GetAllCommandsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/commands", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + GetAllCommandsResponse.Builder responseBuilder = GetAllCommandsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a new command. + * # Required fields: [:name, :linked_content_id, :linked_content_type] + * # `linked_content_type` must be one of ["dashboard", "lookml_dashboard"] + * # + * + */ + @Override + public void createCommand(CreateCommandRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/commands", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateCommandResponse.Builder responseBuilder = CreateCommandResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update an existing custom command. + * # Optional fields: ['name', 'description'] + * # + * + */ + @Override + public void updateCommand(UpdateCommandRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/commands/{command_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateCommandResponse.Builder responseBuilder = UpdateCommandResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete an existing custom command. 
+ * + */ + @Override + public void deleteCommand(DeleteCommandRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/commands/{command_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteCommandResponse.Builder responseBuilder = DeleteCommandResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Command: Manage Commands + + //#region Config: Manage General Configuration + + /** + * Get the current Cloud Storage Configuration. + * + */ + @Override + public void cloudStorageConfiguration(CloudStorageConfigurationRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/cloud_storage", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CloudStorageConfigurationResponse.Builder responseBuilder = CloudStorageConfigurationResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Update the current Cloud Storage Configuration. 
+ * + */ + @Override + public void updateCloudStorageConfiguration(UpdateCloudStorageConfigurationRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/cloud_storage", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateCloudStorageConfigurationResponse.Builder responseBuilder = UpdateCloudStorageConfigurationResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the current status and content of custom welcome emails + * + */ + @Override + public void customWelcomeEmail(CustomWelcomeEmailRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/custom_welcome_email", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CustomWelcomeEmailResponse.Builder responseBuilder = CustomWelcomeEmailResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Update custom welcome email setting and values. Optionally send a test email with the new content to the currently logged in user. + * + */ + @Override + public void updateCustomWelcomeEmail(UpdateCustomWelcomeEmailRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/custom_welcome_email", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateCustomWelcomeEmailResponse.Builder responseBuilder = UpdateCustomWelcomeEmailResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Requests to this endpoint will send a welcome email with the custom content provided in the body to the currently logged in user. 
+ * + */ + @Override + public void updateCustomWelcomeEmailTest(UpdateCustomWelcomeEmailTestRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/custom_welcome_email_test", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateCustomWelcomeEmailTestResponse.Builder responseBuilder = UpdateCustomWelcomeEmailTestResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Retrieve the value for whether or not digest emails is enabled + * + */ + @Override + public void digestEmailsEnabled(DigestEmailsEnabledRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/digest_emails_enabled", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DigestEmailsEnabledResponse.Builder responseBuilder = DigestEmailsEnabledResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the setting for enabling/disabling digest emails + * + */ + @Override + public void updateDigestEmailsEnabled(UpdateDigestEmailsEnabledRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/digest_emails_enabled", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateDigestEmailsEnabledResponse.Builder responseBuilder = UpdateDigestEmailsEnabledResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Trigger the generation of digest email records and send them to Looker's internal system. This does not send + * any actual emails, it generates records containing content which may be of interest for users who have become inactive. 
+ * Emails will be sent at a later time from Looker's internal system if the Digest Emails feature is enabled in settings. + */ + @Override + public void createDigestEmailSend(CreateDigestEmailSendRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/digest_email_send", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateDigestEmailSendResponse.Builder responseBuilder = CreateDigestEmailSendResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Set the menu item name and content for internal help resources + * + */ + @Override + public void internalHelpResourcesContent(InternalHelpResourcesContentRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/internal_help_resources_content", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + InternalHelpResourcesContentResponse.Builder responseBuilder = InternalHelpResourcesContentResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Update internal help resources content + * + */ + @Override + public void updateInternalHelpResourcesContent(UpdateInternalHelpResourcesContentRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/internal_help_resources_content", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateInternalHelpResourcesContentResponse.Builder responseBuilder = UpdateInternalHelpResourcesContentResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get and set the options for internal help resources + * + */ + @Override + public void 
internalHelpResources(InternalHelpResourcesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/internal_help_resources_enabled", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + InternalHelpResourcesResponse.Builder responseBuilder = InternalHelpResourcesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Update internal help resources settings + * + */ + @Override + public void updateInternalHelpResources(UpdateInternalHelpResourcesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/internal_help_resources", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateInternalHelpResourcesResponse.Builder responseBuilder = UpdateInternalHelpResourcesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get all legacy features. + * + */ + @Override + public void allLegacyFeatures(AllLegacyFeaturesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/legacy_features", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllLegacyFeaturesResponse.Builder responseBuilder = AllLegacyFeaturesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the legacy feature with a specific id. 
+ * + */ + @Override + public void legacyFeature(LegacyFeatureRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/legacy_features/{legacy_feature_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + LegacyFeatureResponse.Builder responseBuilder = LegacyFeatureResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update information about the legacy feature with a specific id. + * + */ + @Override + public void updateLegacyFeature(UpdateLegacyFeatureRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/legacy_features/{legacy_feature_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateLegacyFeatureResponse.Builder responseBuilder = UpdateLegacyFeatureResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get a list of locales that Looker supports. + * + */ + @Override + public void allLocales(AllLocalesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/locales", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllLocalesResponse.Builder responseBuilder = AllLocalesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get a list of timezones that Looker supports (e.g. useful for scheduling tasks). 
+ * + */ + @Override + public void allTimezones(AllTimezonesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/timezones", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllTimezonesResponse.Builder responseBuilder = AllTimezonesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all API versions supported by this Looker instance. + * + */ + @Override + public void versions(VersionsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/versions", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + VersionsResponse.Builder responseBuilder = VersionsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### This feature is enabled only by special license. + * ### Gets the whitelabel configuration, which includes hiding documentation links, custom favicon uploading, etc. 
+ * + */ + @Override + public void whitelabelConfiguration(WhitelabelConfigurationRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/whitelabel_configuration", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + WhitelabelConfigurationResponse.Builder responseBuilder = WhitelabelConfigurationResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the whitelabel configuration + * + */ + @Override + public void updateWhitelabelConfiguration(UpdateWhitelabelConfigurationRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/whitelabel_configuration", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateWhitelabelConfigurationResponse.Builder responseBuilder = UpdateWhitelabelConfigurationResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Config: Manage General Configuration + + //#region Connection: Manage Database Connections + + /** + * ### Get information about all connections. + * + */ + @Override + public void allConnections(AllConnectionsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/connections", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllConnectionsResponse.Builder responseBuilder = AllConnectionsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a connection using the specified configuration. 
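The status check, the JSON merge, and the `onNext`/`onCompleted`/`onError` calls are identical in every method in this file. A hypothetical helper like the one below could factor that boilerplate out; it is only a sketch, and it assumes the `LookerClientResponse` wrapper introduced elsewhere in this change, with its `getStatus()` and `getJsonResponse()` accessors. The generated code keeps the logic inline instead.

```java
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import com.google.protobuf.util.JsonFormat;
import io.grpc.Status;
import io.grpc.stub.StreamObserver;

// Hypothetical extraction of the response-handling pattern repeated in each RPC above.
final class LookerResponseRelay {

  @SuppressWarnings("unchecked")
  static <T extends Message> void relay(
      LookerClientResponse lookerResponse,  // wrapper type defined elsewhere in this change
      Message.Builder responseBuilder,
      StreamObserver<T> responseObserver) {
    Status lookerStatus = lookerResponse.getStatus();
    if (lookerStatus != null) {
      // The REST call failed; surface the mapped gRPC status to the caller.
      responseObserver.onError(lookerStatus.asRuntimeException());
      return;
    }
    try {
      String outputJson = lookerResponse.getJsonResponse();
      if (outputJson != null) {
        JsonFormat.parser().ignoringUnknownFields().merge(outputJson, responseBuilder);
      }
      responseObserver.onNext((T) responseBuilder.build());
      responseObserver.onCompleted();
    } catch (InvalidProtocolBufferException e) {
      responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException());
    }
  }

  private LookerResponseRelay() {}
}
```

With such a helper, each generated method body would reduce to printing the request to JSON, issuing the HTTP call, and a single `relay(...)` invocation.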
+ * + */ + @Override + public void createConnection(CreateConnectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/connections", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateConnectionResponse.Builder responseBuilder = CreateConnectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a connection. + * + */ + @Override + public void connection(ConnectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/connections/{connection_name}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ConnectionResponse.Builder responseBuilder = ConnectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a connection using the specified configuration. + * + */ + @Override + public void updateConnection(UpdateConnectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/connections/{connection_name}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateConnectionResponse.Builder responseBuilder = UpdateConnectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a connection. 
+ * + */ + @Override + public void deleteConnection(DeleteConnectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/connections/{connection_name}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteConnectionResponse.Builder responseBuilder = DeleteConnectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a connection override. + * + */ + @Override + public void deleteConnectionOverride(DeleteConnectionOverrideRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/connections/{connection_name}/connection_override/{override_context}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteConnectionOverrideResponse.Builder responseBuilder = DeleteConnectionOverrideResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Test an existing connection. + * + * Note that a connection's 'dialect' property has a 'connection_tests' property that lists the + * specific types of tests that the connection supports. + * + * This API is rate limited. + * + * Unsupported tests in the request will be ignored. + * + */ + @Override + public void testConnection(TestConnectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/connections/{connection_name}/test", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + TestConnectionResponse.Builder responseBuilder = TestConnectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Test a connection configuration. 
+ * + * Note that a connection's 'dialect' property has a 'connection_tests' property that lists the + * specific types of tests that the connection supports. + * + * This API is rate limited. + * + * Unsupported tests in the request will be ignored. + * + */ + @Override + public void testConnectionConfig(TestConnectionConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/connections/test", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + TestConnectionConfigResponse.Builder responseBuilder = TestConnectionConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all dialects. + * + */ + @Override + public void allDialectInfos(AllDialectInfosRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dialect_info", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllDialectInfosResponse.Builder responseBuilder = AllDialectInfosResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get all External OAuth Applications. + * + */ + @Override + public void allExternalOauthApplications(AllExternalOauthApplicationsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/external_oauth_applications", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllExternalOauthApplicationsResponse.Builder responseBuilder = AllExternalOauthApplicationsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create an OAuth Application using the specified configuration. 
+ * + */ + @Override + public void createExternalOauthApplication(CreateExternalOauthApplicationRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/external_oauth_applications", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateExternalOauthApplicationResponse.Builder responseBuilder = CreateExternalOauthApplicationResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all SSH Servers. + * + */ + @Override + public void allSshServers(AllSshServersRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/ssh_servers", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllSshServersResponse.Builder responseBuilder = AllSshServersResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create an SSH Server. + * + */ + @Override + public void createSshServer(CreateSshServerRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/ssh_servers", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateSshServerResponse.Builder responseBuilder = CreateSshServerResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about an SSH Server. 
+ * + */ + @Override + public void sshServer(SshServerRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/ssh_server/{ssh_server_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SshServerResponse.Builder responseBuilder = SshServerResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update an SSH Server. + * + */ + @Override + public void updateSshServer(UpdateSshServerRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/ssh_server/{ssh_server_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateSshServerResponse.Builder responseBuilder = UpdateSshServerResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete an SSH Server. 
+ * + */ + @Override + public void deleteSshServer(DeleteSshServerRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/ssh_server/{ssh_server_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteSshServerResponse.Builder responseBuilder = DeleteSshServerResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Test the SSH Server + * + */ + @Override + public void testSshServer(TestSshServerRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/ssh_server/{ssh_server_id}/test", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + TestSshServerResponse.Builder responseBuilder = TestSshServerResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all SSH Tunnels. 
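For context on how a generated proxy class like this is served, the sketch below registers an implementation with a standard `io.grpc` server. The implementation class name and port are placeholders; the real class and service names come from the `.proto` definitions generated elsewhere in this change.

```java
import io.grpc.Server;
import io.grpc.ServerBuilder;

public class ProxyServerSketch {
  public static void main(String[] args) throws Exception {
    Server server = ServerBuilder.forPort(50051)      // illustrative port
        .addService(new LookerServiceImpl())          // placeholder name for a generated proxy service
        .build()
        .start();
    System.out.println("gRPC proxy listening on " + server.getPort());
    server.awaitTermination();
  }
}
```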
+ * + */ + @Override + public void allSshTunnels(AllSshTunnelsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/ssh_tunnels", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllSshTunnelsResponse.Builder responseBuilder = AllSshTunnelsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create an SSH Tunnel + * + */ + @Override + public void createSshTunnel(CreateSshTunnelRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/ssh_tunnels", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateSshTunnelResponse.Builder responseBuilder = CreateSshTunnelResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about an SSH Tunnel. 
+ * + */ + @Override + public void sshTunnel(SshTunnelRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/ssh_tunnel/{ssh_tunnel_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SshTunnelResponse.Builder responseBuilder = SshTunnelResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update an SSH Tunnel + * + */ + @Override + public void updateSshTunnel(UpdateSshTunnelRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/ssh_tunnel/{ssh_tunnel_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateSshTunnelResponse.Builder responseBuilder = UpdateSshTunnelResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete an SSH Tunnel + * + */ + @Override + public void deleteSshTunnel(DeleteSshTunnelRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/ssh_tunnel/{ssh_tunnel_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteSshTunnelResponse.Builder responseBuilder = DeleteSshTunnelResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Test the SSH Tunnel + * + */ + @Override + public void testSshTunnel(TestSshTunnelRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/ssh_tunnel/{ssh_tunnel_id}/test", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + 
responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + TestSshTunnelResponse.Builder responseBuilder = TestSshTunnelResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the SSH public key + * + * Get the public key created for this instance to identify itself to a remote SSH server. + * + */ + @Override + public void sshPublicKey(SshPublicKeyRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/ssh_public_key", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SshPublicKeyResponse.Builder responseBuilder = SshPublicKeyResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Connection: Manage Database Connections + + //#region Content: Manage Content + + /** + * ### Search Favorite Content + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. 
+ * + * + */ + @Override + public void searchContentFavorites(SearchContentFavoritesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/content_favorite/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchContentFavoritesResponse.Builder responseBuilder = SearchContentFavoritesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get favorite content by its id + */ + @Override + public void contentFavorite(ContentFavoriteRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/content_favorite/{content_favorite_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ContentFavoriteResponse.Builder responseBuilder = ContentFavoriteResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete favorite content + */ + @Override + public void deleteContentFavorite(DeleteContentFavoriteRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/content_favorite/{content_favorite_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteContentFavoriteResponse.Builder responseBuilder = DeleteContentFavoriteResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create favorite content + */ + @Override + public void createContentFavorite(CreateContentFavoriteRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/content_favorite", inputJson); + 
Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateContentFavoriteResponse.Builder responseBuilder = CreateContentFavoriteResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all content metadata in a space. + * + */ + @Override + public void allContentMetadatas(AllContentMetadatasRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/content_metadata", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllContentMetadatasResponse.Builder responseBuilder = AllContentMetadatasResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about an individual content metadata record. + * + */ + @Override + public void contentMetadata(ContentMetadataRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/content_metadata/{content_metadata_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ContentMetadataResponse.Builder responseBuilder = ContentMetadataResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Move a piece of content. 
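On the calling side, each of these unary RPCs is reached through the stub classes gRPC generates from the same `.proto` service definition. The sketch below shows the shape of such a call; the channel target and the `LookerServiceGrpc` stub name are placeholders, while `AllConnectionsRequest`/`AllConnectionsResponse` are the message types used by the `allConnections` method earlier in this file.

```java
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;

public class ProxyClientSketch {
  public static void main(String[] args) {
    // Plaintext channel to a locally running proxy; TLS would be used in practice.
    ManagedChannel channel = ManagedChannelBuilder
        .forAddress("localhost", 50051)
        .usePlaintext()
        .build();

    // "LookerServiceGrpc" stands in for the stub class generated from the .proto service.
    LookerServiceGrpc.LookerServiceBlockingStub stub = LookerServiceGrpc.newBlockingStub(channel);
    AllConnectionsResponse connections = stub.allConnections(AllConnectionsRequest.newBuilder().build());
    System.out.println(connections);

    channel.shutdown();
  }
}
```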
+ * + */ + @Override + public void updateContentMetadata(UpdateContentMetadataRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/content_metadata/{content_metadata_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateContentMetadataResponse.Builder responseBuilder = UpdateContentMetadataResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### All content metadata access records for a content metadata item. + * + */ + @Override + public void allContentMetadataAccesses(AllContentMetadataAccessesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/content_metadata_access", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllContentMetadataAccessesResponse.Builder responseBuilder = AllContentMetadataAccessesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create content metadata access. + * + */ + @Override + public void createContentMetadataAccess(CreateContentMetadataAccessRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/content_metadata_access", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateContentMetadataAccessResponse.Builder responseBuilder = CreateContentMetadataAccessResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update type of access for content metadata. 
+ * + */ + @Override + public void updateContentMetadataAccess(UpdateContentMetadataAccessRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/content_metadata_access/{content_metadata_access_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateContentMetadataAccessResponse.Builder responseBuilder = UpdateContentMetadataAccessResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Remove content metadata access. + * + */ + @Override + public void deleteContentMetadataAccess(DeleteContentMetadataAccessRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/content_metadata_access/{content_metadata_access_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteContentMetadataAccessResponse.Builder responseBuilder = DeleteContentMetadataAccessResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get an image representing the contents of a dashboard or look. + * + * The returned thumbnail is an abstract representation of the contents of a dashbord or look and does not + * reflect the actual data displayed in the respective visualizations. 
+ * + */ + @Override + public void contentThumbnail(ContentThumbnailRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/content_thumbnail/{type}/{resource_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ContentThumbnailResponse.Builder responseBuilder = ContentThumbnailResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Validate All Content + * + * Performs validation of all looks and dashboards + * Returns a list of errors found as well as metadata about the content validation run. + * + */ + @Override + public void contentValidation(ContentValidationRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/content_validation", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ContentValidationResponse.Builder responseBuilder = ContentValidationResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search Content Views + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. 
+   *
+   *
+   */
+  @Override
+  public void searchContentViews(SearchContentViewsRequest request, StreamObserver responseObserver) {
+    try {
+      String inputJson = JsonFormat
+          .printer()
+          .preservingProtoFieldNames()
+          .print(request);
+      LookerClientResponse lookerResponse = lookerClient.get("/content_view/search", inputJson);
+      Status lookerStatus = lookerResponse.getStatus();
+      if (lookerStatus != null) {
+        responseObserver.onError(lookerStatus.asRuntimeException());
+      } else {
+        SearchContentViewsResponse.Builder responseBuilder = SearchContentViewsResponse.newBuilder();
+        String outputJson = lookerResponse.getJsonResponse();
+        if (outputJson != null) {
+          JsonFormat
+              .parser()
+              .ignoringUnknownFields()
+              .merge(outputJson, responseBuilder);
+        }
+        responseObserver.onNext(responseBuilder.build());
+        responseObserver.onCompleted();
+      }
+    } catch (InvalidProtocolBufferException e) {
+      LOGGER.error("invalid protobuf data", e);
+      responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException());
+    }
+  }
+
+
+  /**
+   * ### Get a vector image representing the contents of a dashboard or look.
+   *
+   * # DEPRECATED: Use [content_thumbnail()](#!/Content/content_thumbnail)
+   *
+   * The returned thumbnail is an abstract representation of the contents of a dashboard or look and does not
+   * reflect the actual data displayed in the respective visualizations.
+   *
+   */
+  @Override
+  public void vectorThumbnail(VectorThumbnailRequest request, StreamObserver responseObserver) {
+    try {
+      String inputJson = JsonFormat
+          .printer()
+          .preservingProtoFieldNames()
+          .print(request);
+      LookerClientResponse lookerResponse = lookerClient.get("/vector_thumbnail/{type}/{resource_id}", inputJson);
+      Status lookerStatus = lookerResponse.getStatus();
+      if (lookerStatus != null) {
+        responseObserver.onError(lookerStatus.asRuntimeException());
+      } else {
+        VectorThumbnailResponse.Builder responseBuilder = VectorThumbnailResponse.newBuilder();
+        String outputJson = lookerResponse.getJsonResponse();
+        if (outputJson != null) {
+          JsonFormat
+              .parser()
+              .ignoringUnknownFields()
+              .merge(outputJson, responseBuilder);
+        }
+        responseObserver.onNext(responseBuilder.build());
+        responseObserver.onCompleted();
+      }
+    } catch (InvalidProtocolBufferException e) {
+      LOGGER.error("invalid protobuf data", e);
+      responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException());
+    }
+  }
+
+
+  //#endregion Content: Manage Content
+
+  //#region Dashboard: Manage Dashboards
+
+  /**
+   * ### Get information about all active dashboards.
+   *
+   * Returns an array of **abbreviated dashboard objects**. Dashboards marked as deleted are excluded from this list.
+ * + * Get the **full details** of a specific dashboard by id with [dashboard()](#!/Dashboard/dashboard) + * + * Find **deleted dashboards** with [search_dashboards()](#!/Dashboard/search_dashboards) + * + */ + @Override + public void allDashboards(AllDashboardsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboards", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllDashboardsResponse.Builder responseBuilder = AllDashboardsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a new dashboard + * + * Creates a new dashboard object and returns the details of the newly created dashboard. + * + * `Title`, `user_id`, and `space_id` are all required fields. + * `Space_id` and `user_id` must contain the id of an existing space or user, respectively. + * A dashboard's `title` must be unique within the space in which it resides. + * + * If you receive a 422 error response when creating a dashboard, be sure to look at the + * response body for information about exactly which fields are missing or contain invalid data. + * + * You can **update** an existing dashboard with [update_dashboard()](#!/Dashboard/update_dashboard) + * + * You can **permanently delete** an existing dashboard with [delete_dashboard()](#!/Dashboard/delete_dashboard) + * + */ + @Override + public void createDashboard(CreateDashboardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/dashboards", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateDashboardResponse.Builder responseBuilder = CreateDashboardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search Dashboards + * + * Returns an **array of dashboard objects** that match the specified search criteria. + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. 
+   * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions.
+   * example="dan%" will match "danger" and "Danzig" but not "David"
+   * example="D_m%" will match "Damage" and "dump"
+   *
+   * Integer search params can accept a single value or a comma separated list of values. The multiple
+   * values will be combined under a logical OR operation - results will match at least one of
+   * the given values.
+   *
+   * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match
+   * or exclude (respectively) rows where the column is null.
+   *
+   * Boolean search params accept only "true" and "false" as values.
+   *
+   *
+   * The parameters `limit` and `offset` are recommended for fetching results in page-size chunks.
+   *
+   * Get a **single dashboard** by id with [dashboard()](#!/Dashboard/dashboard)
+   *
+   */
+  @Override
+  public void searchDashboards(SearchDashboardsRequest request, StreamObserver responseObserver) {
+    try {
+      String inputJson = JsonFormat
+          .printer()
+          .preservingProtoFieldNames()
+          .print(request);
+      LookerClientResponse lookerResponse = lookerClient.get("/dashboards/search", inputJson);
+      Status lookerStatus = lookerResponse.getStatus();
+      if (lookerStatus != null) {
+        responseObserver.onError(lookerStatus.asRuntimeException());
+      } else {
+        SearchDashboardsResponse.Builder responseBuilder = SearchDashboardsResponse.newBuilder();
+        String outputJson = lookerResponse.getJsonResponse();
+        if (outputJson != null) {
+          JsonFormat
+              .parser()
+              .ignoringUnknownFields()
+              .merge(outputJson, responseBuilder);
+        }
+        responseObserver.onNext(responseBuilder.build());
+        responseObserver.onCompleted();
+      }
+    } catch (InvalidProtocolBufferException e) {
+      LOGGER.error("invalid protobuf data", e);
+      responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException());
+    }
+  }
+
+
+  /**
+   * ### Import a LookML dashboard to a space as a UDD
+   * Creates a UDD (a dashboard which exists in the Looker database rather than as a LookML file) from the LookML dashboard
+   * and puts it in the space specified. The created UDD will have a lookml_link_id which links to the original LookML dashboard.
+   *
+   * To give the imported dashboard a new title, specify one (e.g. title: "my title") in the body of your request; otherwise the imported
+   * dashboard will have the same title as the original LookML dashboard.
+   *
+   * For this operation to succeed the user must have permission to see the LookML dashboard in question, and have permission to
+   * create content in the space the dashboard is being imported to.
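+   *
+   * A rough sketch of the request (not generated output; the field names below are assumed
+   * from the `lookml_dashboard_id` and `space_id` path parameters):
+   * <pre>{@code
+   * ImportLookmlDashboardRequest request = ImportLookmlDashboardRequest.newBuilder()
+   *     .setLookmlDashboardId("model::sales_overview")
+   *     .setSpaceId("123")
+   *     .build();
+   * // An optional body can carry overrides such as a new title for the imported dashboard.
+   * }</pre>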
+ * + * **Sync** a linked UDD with [sync_lookml_dashboard()](#!/Dashboard/sync_lookml_dashboard) + * **Unlink** a linked UDD by setting lookml_link_id to null with [update_dashboard()](#!/Dashboard/update_dashboard) + * + */ + @Override + public void importLookmlDashboard(ImportLookmlDashboardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/dashboards/{lookml_dashboard_id}/import/{space_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ImportLookmlDashboardResponse.Builder responseBuilder = ImportLookmlDashboardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update all linked dashboards to match the specified LookML dashboard. + * + * Any UDD (a dashboard which exists in the Looker database rather than as a LookML file) which has a `lookml_link_id` + * property value referring to a LookML dashboard's id (model::dashboardname) will be updated so that it matches the current state of the LookML dashboard. + * + * For this operation to succeed the user must have permission to view the LookML dashboard, and only linked dashboards + * that the user has permission to update will be synced. 
+ * + * To **link** or **unlink** a UDD set the `lookml_link_id` property with [update_dashboard()](#!/Dashboard/update_dashboard) + * + */ + @Override + public void syncLookmlDashboard(SyncLookmlDashboardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/dashboards/{lookml_dashboard_id}/sync", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SyncLookmlDashboardResponse.Builder responseBuilder = SyncLookmlDashboardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a dashboard + * + * Returns the full details of the identified dashboard object + * + * Get a **summary list** of all active dashboards with [all_dashboards()](#!/Dashboard/all_dashboards) + * + * You can **Search** for dashboards with [search_dashboards()](#!/Dashboard/search_dashboards) + * + */ + @Override + public void dashboard(DashboardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboards/{dashboard_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardResponse.Builder responseBuilder = DashboardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a dashboard + * + * You can use this function to change the string and integer properties of + * a dashboard. Nested objects such as filters, dashboard elements, or dashboard layout components + * cannot be modified by this function - use the update functions for the respective + * nested object types (like [update_dashboard_filter()](#!/3.1/Dashboard/update_dashboard_filter) to change a filter) + * to modify nested objects referenced by a dashboard. + * + * If you receive a 422 error response when updating a dashboard, be sure to look at the + * response body for information about exactly which fields are missing or contain invalid data. 
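+   *
+   * A hedged sketch of a partial update (the `body` field and the `WriteDashboard` type name
+   * are assumptions; only the properties present in the body are changed):
+   * <pre>{@code
+   * UpdateDashboardRequest request = UpdateDashboardRequest.newBuilder()
+   *     .setDashboardId("42")
+   *     .setBody(WriteDashboard.newBuilder().setTitle("Quarterly KPIs").build())
+   *     .build();
+   * // The proxy issues PATCH /dashboards/{dashboard_id}; REST errors (e.g. a 422) are
+   * // mapped to a gRPC Status and delivered to the caller through onError.
+   * }</pre>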
+   *
+   */
+  @Override
+  public void updateDashboard(UpdateDashboardRequest request, StreamObserver responseObserver) {
+    try {
+      String inputJson = JsonFormat
+          .printer()
+          .preservingProtoFieldNames()
+          .print(request);
+      LookerClientResponse lookerResponse = lookerClient.patch("/dashboards/{dashboard_id}", inputJson);
+      Status lookerStatus = lookerResponse.getStatus();
+      if (lookerStatus != null) {
+        responseObserver.onError(lookerStatus.asRuntimeException());
+      } else {
+        UpdateDashboardResponse.Builder responseBuilder = UpdateDashboardResponse.newBuilder();
+        String outputJson = lookerResponse.getJsonResponse();
+        if (outputJson != null) {
+          JsonFormat
+              .parser()
+              .ignoringUnknownFields()
+              .merge(outputJson, responseBuilder);
+        }
+        responseObserver.onNext(responseBuilder.build());
+        responseObserver.onCompleted();
+      }
+    } catch (InvalidProtocolBufferException e) {
+      LOGGER.error("invalid protobuf data", e);
+      responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException());
+    }
+  }
+
+
+  /**
+   * ### Delete the dashboard with the specified id
+   *
+   * Permanently **deletes** a dashboard. (The dashboard cannot be recovered after this operation.)
+   *
+   * "Soft" delete or hide a dashboard by setting its `deleted` status to `True` with [update_dashboard()](#!/Dashboard/update_dashboard).
+   *
+   * Note: When a dashboard is deleted in the UI, it is soft deleted. Use this API call to permanently remove it, if desired.
+   *
+   */
+  @Override
+  public void deleteDashboard(DeleteDashboardRequest request, StreamObserver responseObserver) {
+    try {
+      String inputJson = JsonFormat
+          .printer()
+          .preservingProtoFieldNames()
+          .print(request);
+      LookerClientResponse lookerResponse = lookerClient.delete("/dashboards/{dashboard_id}", inputJson);
+      Status lookerStatus = lookerResponse.getStatus();
+      if (lookerStatus != null) {
+        responseObserver.onError(lookerStatus.asRuntimeException());
+      } else {
+        DeleteDashboardResponse.Builder responseBuilder = DeleteDashboardResponse.newBuilder();
+        String outputJson = lookerResponse.getJsonResponse();
+        if (outputJson != null) {
+          JsonFormat
+              .parser()
+              .ignoringUnknownFields()
+              .merge(outputJson, responseBuilder);
+        }
+        responseObserver.onNext(responseBuilder.build());
+        responseObserver.onCompleted();
+      }
+    } catch (InvalidProtocolBufferException e) {
+      LOGGER.error("invalid protobuf data", e);
+      responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException());
+    }
+  }
+
+
+  /**
+   * ### Get Aggregate Table LookML for Each Query on a Dashboard
+   *
+   * Returns a JSON object that contains the dashboard id and Aggregate Table lookml
+   *
+   *
+   */
+  @Override
+  public void dashboardAggregateTableLookml(DashboardAggregateTableLookmlRequest request, StreamObserver responseObserver) {
+    try {
+      String inputJson = JsonFormat
+          .printer()
+          .preservingProtoFieldNames()
+          .print(request);
+      LookerClientResponse lookerResponse = lookerClient.get("/dashboards/aggregate_table_lookml/{dashboard_id}", inputJson);
+      Status lookerStatus = lookerResponse.getStatus();
+      if (lookerStatus != null) {
+        responseObserver.onError(lookerStatus.asRuntimeException());
+      } else {
+        DashboardAggregateTableLookmlResponse.Builder responseBuilder = DashboardAggregateTableLookmlResponse.newBuilder();
+        String outputJson = lookerResponse.getJsonResponse();
+        if (outputJson != null) {
+          JsonFormat
+              .parser()
+              .ignoringUnknownFields()
+              .merge(outputJson, responseBuilder);
+        }
+        responseObserver.onNext(responseBuilder.build());
+        responseObserver.onCompleted();
+      }
+    } catch
(InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get lookml of a UDD + * + * Returns a JSON object that contains the dashboard id and the full lookml + * + * + */ + @Override + public void dashboardLookml(DashboardLookmlRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboards/lookml/{dashboard_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardLookmlResponse.Builder responseBuilder = DashboardLookmlResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search Dashboard Elements + * + * Returns an **array of DashboardElement objects** that match the specified search criteria. + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. 
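+   *
+   * A small sketch of a wildcard search (the setter names are assumed from the REST
+   * parameters `title` and `filter_or`):
+   * <pre>{@code
+   * SearchDashboardElementsRequest request = SearchDashboardElementsRequest.newBuilder()
+   *     .setTitle("dan%")     // SQL LIKE pattern, matched case-insensitively
+   *     .setFilterOr(true)    // OR multiple criteria instead of AND
+   *     .build();
+   * }</pre>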
+ * + * + */ + @Override + public void searchDashboardElements(SearchDashboardElementsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboard_elements/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchDashboardElementsResponse.Builder responseBuilder = SearchDashboardElementsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the dashboard element with a specific id. + */ + @Override + public void dashboardElement(DashboardElementRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboard_elements/{dashboard_element_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardElementResponse.Builder responseBuilder = DashboardElementResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the dashboard element with a specific id. + */ + @Override + public void updateDashboardElement(UpdateDashboardElementRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/dashboard_elements/{dashboard_element_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateDashboardElementResponse.Builder responseBuilder = UpdateDashboardElementResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a dashboard element with a specific id. 
+ */ + @Override + public void deleteDashboardElement(DeleteDashboardElementRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/dashboard_elements/{dashboard_element_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteDashboardElementResponse.Builder responseBuilder = DeleteDashboardElementResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all the dashboard elements on a dashboard with a specific id. + */ + @Override + public void dashboardDashboardElements(DashboardDashboardElementsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboards/{dashboard_id}/dashboard_elements", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardDashboardElementsResponse.Builder responseBuilder = DashboardDashboardElementsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a dashboard element on the dashboard with a specific id. + */ + @Override + public void createDashboardElement(CreateDashboardElementRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/dashboard_elements", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateDashboardElementResponse.Builder responseBuilder = CreateDashboardElementResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the dashboard filters with a specific id. 
+ */ + @Override + public void dashboardFilter(DashboardFilterRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboard_filters/{dashboard_filter_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardFilterResponse.Builder responseBuilder = DashboardFilterResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the dashboard filter with a specific id. + */ + @Override + public void updateDashboardFilter(UpdateDashboardFilterRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/dashboard_filters/{dashboard_filter_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateDashboardFilterResponse.Builder responseBuilder = UpdateDashboardFilterResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a dashboard filter with a specific id. + */ + @Override + public void deleteDashboardFilter(DeleteDashboardFilterRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/dashboard_filters/{dashboard_filter_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteDashboardFilterResponse.Builder responseBuilder = DeleteDashboardFilterResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all the dashboard filters on a dashboard with a specific id. 
+ */ + @Override + public void dashboardDashboardFilters(DashboardDashboardFiltersRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboards/{dashboard_id}/dashboard_filters", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardDashboardFiltersResponse.Builder responseBuilder = DashboardDashboardFiltersResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a dashboard filter on the dashboard with a specific id. + */ + @Override + public void createDashboardFilter(CreateDashboardFilterRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/dashboard_filters", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateDashboardFilterResponse.Builder responseBuilder = CreateDashboardFilterResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the dashboard elements with a specific id. + */ + @Override + public void dashboardLayoutComponent(DashboardLayoutComponentRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboard_layout_components/{dashboard_layout_component_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardLayoutComponentResponse.Builder responseBuilder = DashboardLayoutComponentResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the dashboard element with a specific id. 
+ */ + @Override + public void updateDashboardLayoutComponent(UpdateDashboardLayoutComponentRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/dashboard_layout_components/{dashboard_layout_component_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateDashboardLayoutComponentResponse.Builder responseBuilder = UpdateDashboardLayoutComponentResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all the dashboard layout components for a dashboard layout with a specific id. + */ + @Override + public void dashboardLayoutDashboardLayoutComponents(DashboardLayoutDashboardLayoutComponentsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboard_layouts/{dashboard_layout_id}/dashboard_layout_components", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardLayoutDashboardLayoutComponentsResponse.Builder responseBuilder = DashboardLayoutDashboardLayoutComponentsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the dashboard layouts with a specific id. + */ + @Override + public void dashboardLayout(DashboardLayoutRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboard_layouts/{dashboard_layout_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardLayoutResponse.Builder responseBuilder = DashboardLayoutResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the dashboard layout with a specific id. 
+ */ + @Override + public void updateDashboardLayout(UpdateDashboardLayoutRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/dashboard_layouts/{dashboard_layout_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateDashboardLayoutResponse.Builder responseBuilder = UpdateDashboardLayoutResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a dashboard layout with a specific id. + */ + @Override + public void deleteDashboardLayout(DeleteDashboardLayoutRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/dashboard_layouts/{dashboard_layout_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteDashboardLayoutResponse.Builder responseBuilder = DeleteDashboardLayoutResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all the dashboard elements on a dashboard with a specific id. + */ + @Override + public void dashboardDashboardLayouts(DashboardDashboardLayoutsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboards/{dashboard_id}/dashboard_layouts", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardDashboardLayoutsResponse.Builder responseBuilder = DashboardDashboardLayoutsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a dashboard layout on the dashboard with a specific id. 
+ */ + @Override + public void createDashboardLayout(CreateDashboardLayoutRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/dashboard_layouts", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateDashboardLayoutResponse.Builder responseBuilder = CreateDashboardLayoutResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Dashboard: Manage Dashboards + + //#region DataAction: Run Data Actions + + /** + * Perform a data action. The data action object can be obtained from query results, and used to perform an arbitrary action. + */ + @Override + public void performDataAction(PerformDataActionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/data_actions", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + PerformDataActionResponse.Builder responseBuilder = PerformDataActionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * For some data actions, the remote server may supply a form requesting further user input. This endpoint takes a data action, asks the remote server to generate a form for it, and returns that form to you for presentation to the user. 
+ */ + @Override + public void fetchRemoteDataActionForm(FetchRemoteDataActionFormRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/data_actions/form", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FetchRemoteDataActionFormResponse.Builder responseBuilder = FetchRemoteDataActionFormResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion DataAction: Run Data Actions + + //#region Datagroup: Manage Datagroups + + /** + * ### Get information about all datagroups. + * + */ + @Override + public void allDatagroups(AllDatagroupsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/datagroups", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllDatagroupsResponse.Builder responseBuilder = AllDatagroupsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a datagroup. + * + */ + @Override + public void datagroup(DatagroupRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/datagroups/{datagroup_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DatagroupResponse.Builder responseBuilder = DatagroupResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a datagroup using the specified params. 
+ * + */ + @Override + public void updateDatagroup(UpdateDatagroupRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/datagroups/{datagroup_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateDatagroupResponse.Builder responseBuilder = UpdateDatagroupResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Datagroup: Manage Datagroups + + //#region Folder: Manage Folders + + /** + * Search for folders by creator id, parent id, name, etc + */ + @Override + public void searchFolders(SearchFoldersRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/folders/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchFoldersResponse.Builder responseBuilder = SearchFoldersResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the folder with a specific id. + */ + @Override + public void folder(FolderRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/folders/{folder_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FolderResponse.Builder responseBuilder = FolderResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the folder with a specific id. 
+ */ + @Override + public void updateFolder(UpdateFolderRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/folders/{folder_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateFolderResponse.Builder responseBuilder = UpdateFolderResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete the folder with a specific id including any children folders. + * **DANGER** this will delete all looks and dashboards in the folder. + * + */ + @Override + public void deleteFolder(DeleteFolderRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/folders/{folder_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteFolderResponse.Builder responseBuilder = DeleteFolderResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all folders. + * + * In API 3.x, this will not return empty personal folders, unless they belong to the calling user. + * In API 4.0+, all personal folders will be returned. + * + * + */ + @Override + public void allFolders(AllFoldersRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/folders", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllFoldersResponse.Builder responseBuilder = AllFoldersResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a folder with specified information. + * + * Caller must have permission to edit the parent folder and to create folders, otherwise the request + * returns 404 Not Found. 
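+   *
+   * A hedged sketch of the minimal request (assuming the `name` and `parent_id` body fields
+   * are exposed directly on the generated request message):
+   * <pre>{@code
+   * CreateFolderRequest request = CreateFolderRequest.newBuilder()
+   *     .setName("Team Dashboards")
+   *     .setParentId("1")
+   *     .build();
+   * // The proxy serializes this to snake_case JSON and issues POST /folders.
+   * }</pre>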
+ * + */ + @Override + public void createFolder(CreateFolderRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/folders", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateFolderResponse.Builder responseBuilder = CreateFolderResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the children of a folder. + */ + @Override + public void folderChildren(FolderChildrenRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/folders/{folder_id}/children", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FolderChildrenResponse.Builder responseBuilder = FolderChildrenResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search the children of a folder + */ + @Override + public void folderChildrenSearch(FolderChildrenSearchRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/folders/{folder_id}/children/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FolderChildrenSearchResponse.Builder responseBuilder = FolderChildrenSearchResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the parent of a folder + */ + @Override + public void folderParent(FolderParentRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/folders/{folder_id}/parent", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + 
responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FolderParentResponse.Builder responseBuilder = FolderParentResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the ancestors of a folder + */ + @Override + public void folderAncestors(FolderAncestorsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/folders/{folder_id}/ancestors", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FolderAncestorsResponse.Builder responseBuilder = FolderAncestorsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get all looks in a folder. + * In API 3.x, this will return all looks in a folder, including looks in the trash. + * In API 4.0+, all looks in a folder will be returned, excluding looks in the trash. 
+ * + */ + @Override + public void folderLooks(FolderLooksRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/folders/{folder_id}/looks", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FolderLooksResponse.Builder responseBuilder = FolderLooksResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the dashboards in a folder + */ + @Override + public void folderDashboards(FolderDashboardsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/folders/{folder_id}/dashboards", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FolderDashboardsResponse.Builder responseBuilder = FolderDashboardsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Folder: Manage Folders + + //#region Group: Manage Groups + + /** + * ### Get information about all groups. + * + */ + @Override + public void allGroups(AllGroupsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/groups", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllGroupsResponse.Builder responseBuilder = AllGroupsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Creates a new group (admin only). 
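+ *
+ * Forwards to `POST /groups` on the Looker REST API.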
+ * + */ + @Override + public void createGroup(CreateGroupRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/groups", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateGroupResponse.Builder responseBuilder = CreateGroupResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search groups + * + * Returns all group records that match the given search criteria. + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. + * + * + */ + @Override + public void searchGroups(SearchGroupsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/groups/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchGroupsResponse.Builder responseBuilder = SearchGroupsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search groups include roles + * + * Returns all group records that match the given search criteria, and attaches any associated roles. + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. 
+ * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. + * + * + */ + @Override + public void searchGroupsWithRoles(SearchGroupsWithRolesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/groups/search/with_roles", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchGroupsWithRolesResponse.Builder responseBuilder = SearchGroupsWithRolesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search groups include hierarchy + * + * Returns all group records that match the given search criteria, and attaches + * associated role_ids and parent group_ids. + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. 
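+ *
+ * A request sketch (illustrative only; it assumes the documented search params
+ * `name` and `filter_or` are generated as fields on the request message):
+ *
+ *     SearchGroupsWithHierarchyRequest request = SearchGroupsWithHierarchyRequest.newBuilder()
+ *         .setName("dan%")    // case-insensitive SQL LIKE pattern (assumed field)
+ *         .setFilterOr(true)  // combine search params with OR instead of AND (assumed field)
+ *         .build();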
+ * + * + */ + @Override + public void searchGroupsWithHierarchy(SearchGroupsWithHierarchyRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/groups/search/with_hierarchy", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchGroupsWithHierarchyResponse.Builder responseBuilder = SearchGroupsWithHierarchyResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a group. + * + */ + @Override + public void group(GroupRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/groups/{group_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + GroupResponse.Builder responseBuilder = GroupResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Updates the a group (admin only). + */ + @Override + public void updateGroup(UpdateGroupRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/groups/{group_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateGroupResponse.Builder responseBuilder = UpdateGroupResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Deletes a group (admin only). 
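+ *
+ * Proxies `DELETE /groups/{group_id}` on the Looker REST API.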
+ * + */ + @Override + public void deleteGroup(DeleteGroupRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/groups/{group_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteGroupResponse.Builder responseBuilder = DeleteGroupResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all the groups in a group + * + */ + @Override + public void allGroupGroups(AllGroupGroupsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/groups/{group_id}/groups", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllGroupGroupsResponse.Builder responseBuilder = AllGroupGroupsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Adds a new group to a group. + * + */ + @Override + public void addGroupGroup(AddGroupGroupRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/groups/{group_id}/groups", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AddGroupGroupResponse.Builder responseBuilder = AddGroupGroupResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all the users directly included in a group. 
+ * + */ + @Override + public void allGroupUsers(AllGroupUsersRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/groups/{group_id}/users", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllGroupUsersResponse.Builder responseBuilder = AllGroupUsersResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Adds a new user to a group. + * + */ + @Override + public void addGroupUser(AddGroupUserRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/groups/{group_id}/users", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AddGroupUserResponse.Builder responseBuilder = AddGroupUserResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Removes a user from a group. + * + */ + @Override + public void deleteGroupUser(DeleteGroupUserRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/groups/{group_id}/users/{user_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteGroupUserResponse.Builder responseBuilder = DeleteGroupUserResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Removes a group from a group. 
+ * + */ + @Override + public void deleteGroupFromGroup(DeleteGroupFromGroupRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/groups/{group_id}/groups/{deleting_group_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteGroupFromGroupResponse.Builder responseBuilder = DeleteGroupFromGroupResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Set the value of a user attribute for a group. + * + * For information about how user attribute values are calculated, see [Set User Attribute Group Values](#!/UserAttribute/set_user_attribute_group_values). + * + */ + @Override + public void updateUserAttributeGroupValue(UpdateUserAttributeGroupValueRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/groups/{group_id}/attribute_values/{user_attribute_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateUserAttributeGroupValueResponse.Builder responseBuilder = UpdateUserAttributeGroupValueResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Remove a user attribute value from a group. 
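+ *
+ * Proxies `DELETE /groups/{group_id}/attribute_values/{user_attribute_id}` on the Looker REST API.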
+ * + */ + @Override + public void deleteUserAttributeGroupValue(DeleteUserAttributeGroupValueRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/groups/{group_id}/attribute_values/{user_attribute_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserAttributeGroupValueResponse.Builder responseBuilder = DeleteUserAttributeGroupValueResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Group: Manage Groups + + //#region Homepage: Manage Homepage + + /** + * ### Get information about the primary homepage's sections. + * + */ + @Override + public void allPrimaryHomepageSections(AllPrimaryHomepageSectionsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/primary_homepage_sections", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllPrimaryHomepageSectionsResponse.Builder responseBuilder = AllPrimaryHomepageSectionsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Homepage: Manage Homepage + + //#region Integration: Manage Integrations + + /** + * ### Get information about all Integration Hubs. + * + */ + @Override + public void allIntegrationHubs(AllIntegrationHubsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/integration_hubs", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllIntegrationHubsResponse.Builder responseBuilder = AllIntegrationHubsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a new Integration Hub. 
+ * + * This API is rate limited to prevent it from being used for SSRF attacks + * + */ + @Override + public void createIntegrationHub(CreateIntegrationHubRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/integration_hubs", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateIntegrationHubResponse.Builder responseBuilder = CreateIntegrationHubResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a Integration Hub. + * + */ + @Override + public void integrationHub(IntegrationHubRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/integration_hubs/{integration_hub_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + IntegrationHubResponse.Builder responseBuilder = IntegrationHubResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a Integration Hub definition. + * + * This API is rate limited to prevent it from being used for SSRF attacks + * + */ + @Override + public void updateIntegrationHub(UpdateIntegrationHubRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/integration_hubs/{integration_hub_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateIntegrationHubResponse.Builder responseBuilder = UpdateIntegrationHubResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a Integration Hub. 
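+ *
+ * Proxies `DELETE /integration_hubs/{integration_hub_id}` on the Looker REST API.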
+ * + */ + @Override + public void deleteIntegrationHub(DeleteIntegrationHubRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/integration_hubs/{integration_hub_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteIntegrationHubResponse.Builder responseBuilder = DeleteIntegrationHubResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Accepts the legal agreement for a given integration hub. This only works for integration hubs that have legal_agreement_required set to true and legal_agreement_signed set to false. + */ + @Override + public void acceptIntegrationHubLegalAgreement(AcceptIntegrationHubLegalAgreementRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/integration_hubs/{integration_hub_id}/accept_legal_agreement", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AcceptIntegrationHubLegalAgreementResponse.Builder responseBuilder = AcceptIntegrationHubLegalAgreementResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all Integrations. + * + */ + @Override + public void allIntegrations(AllIntegrationsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/integrations", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllIntegrationsResponse.Builder responseBuilder = AllIntegrationsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a Integration. 
+ * + */ + @Override + public void integration(IntegrationRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/integrations/{integration_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + IntegrationResponse.Builder responseBuilder = IntegrationResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update parameters on a Integration. + * + */ + @Override + public void updateIntegration(UpdateIntegrationRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/integrations/{integration_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateIntegrationResponse.Builder responseBuilder = UpdateIntegrationResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Returns the Integration form for presentation to the user. + */ + @Override + public void fetchIntegrationForm(FetchIntegrationFormRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/integrations/{integration_id}/form", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FetchIntegrationFormResponse.Builder responseBuilder = FetchIntegrationFormResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Tests the integration to make sure all the settings are working. 
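+ *
+ * Forwards to `POST /integrations/{integration_id}/test` on the Looker REST API.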
+ */ + @Override + public void testIntegration(TestIntegrationRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/integrations/{integration_id}/test", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + TestIntegrationResponse.Builder responseBuilder = TestIntegrationResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Integration: Manage Integrations + + //#region Look: Run and Manage Looks + + /** + * ### Get information about all active Looks + * + * Returns an array of **abbreviated Look objects** describing all the looks that the caller has access to. Soft-deleted Looks are **not** included. + * + * Get the **full details** of a specific look by id with [look(id)](#!/Look/look) + * + * Find **soft-deleted looks** with [search_looks()](#!/Look/search_looks) + * + */ + @Override + public void allLooks(AllLooksRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/looks", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllLooksResponse.Builder responseBuilder = AllLooksResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a Look + * + * To create a look to display query data, first create the query with [create_query()](#!/Query/create_query) + * then assign the query's id to the `query_id` property in the call to `create_look()`. + * + * To place the look into a particular space, assign the space's id to the `space_id` property + * in the call to `create_look()`. 
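+ *
+ * Forwards to `POST /looks` on the Looker REST API and maps the JSON response back
+ * into a `CreateLookResponse`.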
+ * + */ + @Override + public void createLook(CreateLookRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/looks", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateLookResponse.Builder responseBuilder = CreateLookResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search Looks + * + * Returns an **array of Look objects** that match the specified search criteria. + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. + * + * + * Get a **single look** by id with [look(id)](#!/Look/look) + * + */ + @Override + public void searchLooks(SearchLooksRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/looks/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchLooksResponse.Builder responseBuilder = SearchLooksResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get a Look. + * + * Returns detailed information about a Look and its associated Query. 
+ * + * + */ + @Override + public void look(LookRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/looks/{look_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + LookResponse.Builder responseBuilder = LookResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Modify a Look + * + * Use this function to modify parts of a look. Property values given in a call to `update_look` are + * applied to the existing look, so there's no need to include properties whose values are not changing. + * It's best to specify only the properties you want to change and leave everything else out + * of your `update_look` call. **Look properties marked 'read-only' will be ignored.** + * + * When a user deletes a look in the Looker UI, the look data remains in the database but is + * marked with a deleted flag ("soft-deleted"). Soft-deleted looks can be undeleted (by an admin) + * if the delete was in error. + * + * To soft-delete a look via the API, use [update_look()](#!/Look/update_look) to change the look's `deleted` property to `true`. + * You can undelete a look by calling `update_look` to change the look's `deleted` property to `false`. + * + * Soft-deleted looks are excluded from the results of [all_looks()](#!/Look/all_looks) and [search_looks()](#!/Look/search_looks), so they + * essentially disappear from view even though they still reside in the db. + * In API 3.1 and later, you can pass `deleted: true` as a parameter to [search_looks()](#!/3.1/Look/search_looks) to list soft-deleted looks. + * + * NOTE: [delete_look()](#!/Look/delete_look) performs a "hard delete" - the look data is removed from the Looker + * database and destroyed. There is no "undo" for `delete_look()`. + * + */ + @Override + public void updateLook(UpdateLookRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/looks/{look_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateLookResponse.Builder responseBuilder = UpdateLookResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Permanently Delete a Look + * + * This operation **permanently** removes a look from the Looker database. 
+ * + * NOTE: There is no "undo" for this kind of delete. + * + * For information about soft-delete (which can be undone) see [update_look()](#!/Look/update_look). + * + */ + @Override + public void deleteLook(DeleteLookRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/looks/{look_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteLookResponse.Builder responseBuilder = DeleteLookResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Run a Look + * + * Runs a given look's query and returns the results in the requested format. + * + * Supported formats: + * + * | result_format | Description + * | :-----------: | :--- | + * | json | Plain json + * | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + * | csv | Comma separated values with a header + * | txt | Tab separated values with a header + * | html | Simple html + * | md | Simple markdown + * | xlsx | MS Excel spreadsheet + * | sql | Returns the generated SQL rather than running the query + * | png | A PNG image of the visualization of the query + * | jpg | A JPG image of the visualization of the query + * + * + * + */ + @Override + public void runLook(RunLookRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/looks/{look_id}/run/{result_format}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RunLookResponse.Builder responseBuilder = RunLookResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Look: Run and Manage Looks + + //#region LookmlModel: Manage LookML Models + + /** + * ### Get information about all lookml models. 
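+ *
+ * Forwards to `GET /lookml_models` on the Looker REST API.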
+ * + */ + @Override + public void allLookmlModels(AllLookmlModelsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/lookml_models", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllLookmlModelsResponse.Builder responseBuilder = AllLookmlModelsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a lookml model using the specified configuration. + * + */ + @Override + public void createLookmlModel(CreateLookmlModelRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/lookml_models", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateLookmlModelResponse.Builder responseBuilder = CreateLookmlModelResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a lookml model. + * + */ + @Override + public void lookmlModel(LookmlModelRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/lookml_models/{lookml_model_name}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + LookmlModelResponse.Builder responseBuilder = LookmlModelResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a lookml model using the specified configuration. 
+ * + */ + @Override + public void updateLookmlModel(UpdateLookmlModelRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/lookml_models/{lookml_model_name}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateLookmlModelResponse.Builder responseBuilder = UpdateLookmlModelResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a lookml model. + * + */ + @Override + public void deleteLookmlModel(DeleteLookmlModelRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/lookml_models/{lookml_model_name}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteLookmlModelResponse.Builder responseBuilder = DeleteLookmlModelResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a lookml model explore. 
+ * + */ + @Override + public void lookmlModelExplore(LookmlModelExploreRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/lookml_models/{lookml_model_name}/explores/{explore_name}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + LookmlModelExploreResponse.Builder responseBuilder = LookmlModelExploreResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion LookmlModel: Manage LookML Models + + //#region Metadata: Connection Metadata Features + + /** + * ### Field name suggestions for a model and view + * + * + */ + @Override + public void modelFieldnameSuggestions(ModelFieldnameSuggestionsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/models/{model_name}/views/{view_name}/fields/{field_name}/suggestions", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ModelFieldnameSuggestionsResponse.Builder responseBuilder = ModelFieldnameSuggestionsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### List databases available to this connection + * + * Certain dialects can support multiple databases per single connection. + * If this connection supports multiple databases, the database names will be returned in an array. + * + * Connections using dialects that do not support multiple databases will return an empty array. + * + * **Note**: [Connection Features](#!/Metadata/connection_features) can be used to determine if a connection supports + * multiple databases. 
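+ *
+ * A caller reaches this method through the generated gRPC stub; a rough sketch,
+ * with the stub variable, connection name, and `connection_name` field assumed
+ * for illustration only:
+ *
+ *     ConnectionDatabasesRequest request = ConnectionDatabasesRequest.newBuilder()
+ *         .setConnectionName("thelook")  // assumed field backing the {connection_name} path param
+ *         .build();
+ *     // blockingStub: a blocking stub for this service bound to a ManagedChannel (name illustrative)
+ *     ConnectionDatabasesResponse databases = blockingStub.connectionDatabases(request);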
+ * + */ + @Override + public void connectionDatabases(ConnectionDatabasesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/connections/{connection_name}/databases", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ConnectionDatabasesResponse.Builder responseBuilder = ConnectionDatabasesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Retrieve metadata features for this connection + * + * Returns a list of feature names with `true` (available) or `false` (not available) + * + * + */ + @Override + public void connectionFeatures(ConnectionFeaturesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/connections/{connection_name}/features", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ConnectionFeaturesResponse.Builder responseBuilder = ConnectionFeaturesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the list of schemas and tables for a connection + * + * + */ + @Override + public void connectionSchemas(ConnectionSchemasRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/connections/{connection_name}/schemas", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ConnectionSchemasResponse.Builder responseBuilder = ConnectionSchemasResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the list of tables for a schema + * + * For dialects that support multiple databases, optionally identify which to use. If not provided, the default + * database for the connection will be used. 
+ * + * For dialects that do **not** support multiple databases, **do not use** the database parameter + * + */ + @Override + public void connectionTables(ConnectionTablesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/connections/{connection_name}/tables", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ConnectionTablesResponse.Builder responseBuilder = ConnectionTablesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the columns (and therefore also the tables) in a specific schema + * + * + */ + @Override + public void connectionColumns(ConnectionColumnsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/connections/{connection_name}/columns", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ConnectionColumnsResponse.Builder responseBuilder = ConnectionColumnsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search a connection for columns matching the specified name + * + * **Note**: `column_name` must be a valid column name. It is not a search pattern. 
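+ * For example (a hedged sketch — the stub variable and setter names are assumptions derived from
+ * the REST parameters, not definitions in this file):
+ *
+ * <pre>{@code
+ * // "id" must be an exact column name, not a search pattern.
+ * ConnectionSearchColumnsResponse columns = stub.connectionSearchColumns(
+ *     ConnectionSearchColumnsRequest.newBuilder()
+ *         .setConnectionName("thelook")
+ *         .setColumnName("id")
+ *         .build());
+ * }</pre>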
+ * + */ + @Override + public void connectionSearchColumns(ConnectionSearchColumnsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/connections/{connection_name}/search_columns", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ConnectionSearchColumnsResponse.Builder responseBuilder = ConnectionSearchColumnsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Connection cost estimating + * + * Assign a `sql` statement to the body of the request. e.g., for Ruby, `{sql: 'select * from users'}` + * + * **Note**: If the connection's dialect has no support for cost estimates, an error will be returned + * + */ + @Override + public void connectionCostEstimate(ConnectionCostEstimateRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/connections/{connection_name}/cost_estimate", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ConnectionCostEstimateResponse.Builder responseBuilder = ConnectionCostEstimateResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Metadata: Connection Metadata Features + + //#region Project: Manage Projects + + /** + * ### Generate Lockfile for All LookML Dependencies + * + * Git must have been configured, must be in dev mode and deploy permission required + * + * Install_all is a two step process + * 1. For each remote_dependency in a project the dependency manager will resolve any ambiguous ref. + * 2. The project will then write out a lockfile including each remote_dependency with its resolved ref. 
+ * + * + */ + @Override + public void lockAll(LockAllRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/projects/{project_id}/manifest/lock_all", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + LockAllResponse.Builder responseBuilder = LockAllResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get All Git Branches + * + * Returns a list of git branches in the project repository + * + */ + @Override + public void allGitBranches(AllGitBranchesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/git_branches", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllGitBranchesResponse.Builder responseBuilder = AllGitBranchesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the Current Git Branch + * + * Returns the git branch currently checked out in the given project repository + * + */ + @Override + public void gitBranch(GitBranchRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/git_branch", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + GitBranchResponse.Builder responseBuilder = GitBranchResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Checkout and/or reset --hard an existing Git Branch + * + * Only allowed in development mode + * - Call `update_session` to select the 'dev' workspace. + * + * Checkout an existing branch if name field is different from the name of the currently checked out branch. 
+ * + * Optionally specify a branch name, tag name or commit SHA to which the branch should be reset. + * **DANGER** hard reset will be force pushed to the remote. Unsaved changes and commits may be permanently lost. + * + * + */ + @Override + public void updateGitBranch(UpdateGitBranchRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/projects/{project_id}/git_branch", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateGitBranchResponse.Builder responseBuilder = UpdateGitBranchResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create and Checkout a Git Branch + * + * Creates and checks out a new branch in the given project repository + * Only allowed in development mode + * - Call `update_session` to select the 'dev' workspace. + * + * Optionally specify a branch name, tag name or commit SHA as the start point in the ref field. + * If no ref is specified, HEAD of the current branch will be used as the start point for the new branch. + * + * + */ + @Override + public void createGitBranch(CreateGitBranchRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/projects/{project_id}/git_branch", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateGitBranchResponse.Builder responseBuilder = CreateGitBranchResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the specified Git Branch + * + * Returns the git branch specified in branch_name path param if it exists in the given project repository + * + */ + @Override + public void findGitBranch(FindGitBranchRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/git_branch/{branch_name}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FindGitBranchResponse.Builder responseBuilder = FindGitBranchResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + 
.ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete the specified Git Branch + * + * Delete git branch specified in branch_name path param from local and remote of specified project repository + * + */ + @Override + public void deleteGitBranch(DeleteGitBranchRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/projects/{project_id}/git_branch/{branch_name}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteGitBranchResponse.Builder responseBuilder = DeleteGitBranchResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Deploy a Remote Branch or Ref to Production + * + * Git must have been configured and deploy permission required. + * + * Deploy is a one/two step process + * 1. If this is the first deploy of this project, create the production project with git repository. + * 2. Pull the branch or ref into the production project. + * + * Can only specify either a branch or a ref. + * + * + */ + @Override + public void deployRefToProduction(DeployRefToProductionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/projects/{project_id}/deploy_ref_to_production", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeployRefToProductionResponse.Builder responseBuilder = DeployRefToProductionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Deploy LookML from this Development Mode Project to Production + * + * Git must have been configured, must be in dev mode and deploy permission required + * + * Deploy is a two / three step process: + * + * 1. Push commits in current branch of dev mode project to the production branch (origin/master). + * Note a. This step is skipped in read-only projects. + * Note b. If this step is unsuccessful for any reason (e.g. rejected non-fastforward because production branch has + * commits not in current branch), subsequent steps will be skipped. + * 2. 
If this is the first deploy of this project, create the production project with git repository. + * 3. Pull the production branch into the production project. + * + * + */ + @Override + public void deployToProduction(DeployToProductionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/projects/{project_id}/deploy_to_production", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeployToProductionResponse.Builder responseBuilder = DeployToProductionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Reset a project to the revision of the project that is in production. + * + * **DANGER** this will delete any changes that have not been pushed to a remote repository. + * + */ + @Override + public void resetProjectToProduction(ResetProjectToProductionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/projects/{project_id}/reset_to_production", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ResetProjectToProductionResponse.Builder responseBuilder = ResetProjectToProductionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Reset a project development branch to the revision of the project that is on the remote. + * + * **DANGER** this will delete any changes that have not been pushed to a remote repository. 
+ * + */ + @Override + public void resetProjectToRemote(ResetProjectToRemoteRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/projects/{project_id}/reset_to_remote", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ResetProjectToRemoteResponse.Builder responseBuilder = ResetProjectToRemoteResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get All Projects + * + * Returns all projects visible to the current user + * + */ + @Override + public void allProjects(AllProjectsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllProjectsResponse.Builder responseBuilder = AllProjectsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create A Project + * + * dev mode required. + * - Call `update_session` to select the 'dev' workspace. + * + * `name` is required. + * `git_remote_url` is not allowed. To configure Git for the newly created project, follow the instructions in `update_project`. 
+ * + * + */ + @Override + public void createProject(CreateProjectRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/projects", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateProjectResponse.Builder responseBuilder = CreateProjectResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get A Project + * + * Returns the project with the given project id + * + */ + @Override + public void project(ProjectRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ProjectResponse.Builder responseBuilder = ProjectResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update Project Configuration + * + * Apply changes to a project's configuration. + * + * + * #### Configuring Git for a Project + * + * To set up a Looker project with a remote git repository, follow these steps: + * + * 1. Call `update_session` to select the 'dev' workspace. + * 1. Call `create_git_deploy_key` to create a new deploy key for the project + * 1. Copy the deploy key text into the remote git repository's ssh key configuration + * 1. Call `update_project` to set project's `git_remote_url` ()and `git_service_name`, if necessary). + * + * When you modify a project's `git_remote_url`, Looker connects to the remote repository to fetch + * metadata. The remote git repository MUST be configured with the Looker-generated deploy + * key for this project prior to setting the project's `git_remote_url`. + * + * To set up a Looker project with a git repository residing on the Looker server (a 'bare' git repo): + * + * 1. Call `update_session` to select the 'dev' workspace. + * 1. Call `update_project` setting `git_remote_url` to null and `git_service_name` to "bare". 
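+ * A hedged sketch of the remote-git flow above via this proxy (the stub variable and the flat
+ * setters are assumptions; the generated request may instead nest the project body):
+ *
+ * <pre>{@code
+ * // 1. Generate a deploy key for the project (field name assumed from the REST path parameter).
+ * CreateGitDeployKeyResponse key = stub.createGitDeployKey(
+ *     CreateGitDeployKeyRequest.newBuilder().setProjectId("my_project").build());
+ * // 2. Copy the returned public key into the remote repository's SSH key configuration.
+ * // 3. Point the project at the remote repository (setter names assumed, not verified).
+ * UpdateProjectResponse updated = stub.updateProject(
+ *     UpdateProjectRequest.newBuilder()
+ *         .setProjectId("my_project")
+ *         .setGitRemoteUrl("git@github.com:acme/my_project.git")
+ *         .build());
+ * }</pre>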
+ * + * + */ + @Override + public void updateProject(UpdateProjectRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/projects/{project_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateProjectResponse.Builder responseBuilder = UpdateProjectResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get A Projects Manifest object + * + * Returns the project with the given project id + * + */ + @Override + public void manifest(ManifestRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/manifest", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ManifestResponse.Builder responseBuilder = ManifestResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Git Deploy Key + * + * Returns the ssh public key previously created for a project's git repository. + * + */ + @Override + public void gitDeployKey(GitDeployKeyRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/git/deploy_key", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + GitDeployKeyResponse.Builder responseBuilder = GitDeployKeyResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create Git Deploy Key + * + * Create a public/private key pair for authenticating ssh git requests from Looker to a remote git repository + * for a particular Looker project. + * + * Returns the public key of the generated ssh key pair. 
+ * + * Copy this public key to your remote git repository's ssh keys configuration so that the remote git service can + * validate and accept git requests from the Looker server. + * + */ + @Override + public void createGitDeployKey(CreateGitDeployKeyRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/projects/{project_id}/git/deploy_key", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateGitDeployKeyResponse.Builder responseBuilder = CreateGitDeployKeyResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Cached Project Validation Results + * + * Returns the cached results of a previous project validation calculation, if any. + * Returns http status 204 No Content if no validation results exist. + * + * Validating the content of all the files in a project can be computationally intensive + * for large projects. Use this API to simply fetch the results of the most recent + * project validation rather than revalidating the entire project from scratch. + * + * A value of `"stale": true` in the response indicates that the project has changed since + * the cached validation results were computed. The cached validation results may no longer + * reflect the current state of the project. + * + */ + @Override + public void projectValidationResults(ProjectValidationResultsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/validate", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ProjectValidationResultsResponse.Builder responseBuilder = ProjectValidationResultsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Validate Project + * + * Performs lint validation of all lookml files in the project. + * Returns a list of errors found, if any. + * + * Validating the content of all the files in a project can be computationally intensive + * for large projects. For best performance, call `validate_project(project_id)` only + * when you really want to recompute project validation. 
To quickly display the results of + * the most recent project validation (without recomputing), use `project_validation_results(project_id)` + * + */ + @Override + public void validateProject(ValidateProjectRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/projects/{project_id}/validate", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ValidateProjectResponse.Builder responseBuilder = ValidateProjectResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Project Workspace + * + * Returns information about the state of the project files in the currently selected workspace + * + */ + @Override + public void projectWorkspace(ProjectWorkspaceRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/current_workspace", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ProjectWorkspaceResponse.Builder responseBuilder = ProjectWorkspaceResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get All Project Files + * + * Returns a list of the files in the project + * + */ + @Override + public void allProjectFiles(AllProjectFilesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/files", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllProjectFilesResponse.Builder responseBuilder = AllProjectFilesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Project File Info + * + * Returns information about a file in the project + * + */ + @Override + public void 
projectFile(ProjectFileRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/files/file", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ProjectFileResponse.Builder responseBuilder = ProjectFileResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get All Git Connection Tests + * + * dev mode required. + * - Call `update_session` to select the 'dev' workspace. + * + * Returns a list of tests which can be run against a project's (or the dependency project for the provided remote_url) git connection. Call [Run Git Connection Test](#!/Project/run_git_connection_test) to execute each test in sequence. + * + * Tests are ordered by increasing specificity. Tests should be run in the order returned because later tests require functionality tested by tests earlier in the test list. + * + * For example, a late-stage test for write access is meaningless if connecting to the git server (an early test) is failing. + * + */ + @Override + public void allGitConnectionTests(AllGitConnectionTestsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/git_connection_tests", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllGitConnectionTestsResponse.Builder responseBuilder = AllGitConnectionTestsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Run a git connection test + * + * Run the named test on the git service used by this project (or the dependency project for the provided remote_url) and return the result. This + * is intended to help debug git connections when things do not work properly, to give + * more helpful information about why a git url is not working with Looker. + * + * Tests should be run in the order they are returned by [Get All Git Connection Tests](#!/Project/all_git_connection_tests). 
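+ * A hedged sketch of that ordering (the stub variable, the repeated-field accessor, and the
+ * per-test getters are assumptions — this file only defines the request/response message names):
+ *
+ * <pre>{@code
+ * AllGitConnectionTestsResponse tests = stub.allGitConnectionTests(
+ *     AllGitConnectionTestsRequest.newBuilder().setProjectId("my_project").build());
+ * // Run each test in the order returned; accessor names below are assumed.
+ * for (var test : tests.getResultList()) {
+ *   stub.runGitConnectionTest(RunGitConnectionTestRequest.newBuilder()
+ *       .setProjectId("my_project")
+ *       .setTestId(test.getId())
+ *       .build());
+ * }
+ * }</pre>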
+ * + */ + @Override + public void runGitConnectionTest(RunGitConnectionTestRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/git_connection_tests/{test_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RunGitConnectionTestResponse.Builder responseBuilder = RunGitConnectionTestResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get All LookML Tests + * + * Returns a list of tests which can be run to validate a project's LookML code and/or the underlying data, + * optionally filtered by the file id. + * Call [Run LookML Test](#!/Project/run_lookml_test) to execute tests. + * + */ + @Override + public void allLookmlTests(AllLookmlTestsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/lookml_tests", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllLookmlTestsResponse.Builder responseBuilder = AllLookmlTestsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Run LookML Tests + * + * Runs all tests in the project, optionally filtered by file, test, and/or model. 
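+ * For example (hypothetical stub variable and setter names, derived from the documented filters):
+ *
+ * <pre>{@code
+ * RunLookmlTestResponse results = stub.runLookmlTest(RunLookmlTestRequest.newBuilder()
+ *     .setProjectId("my_project")
+ *     .setModel("thelook")  // optional filter; omit to run every test in the project
+ *     .build());
+ * }</pre>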
+ * + */ + @Override + public void runLookmlTest(RunLookmlTestRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/lookml_tests/run", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RunLookmlTestResponse.Builder responseBuilder = RunLookmlTestResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Configure Repository Credential for a remote dependency + * + * Admin required. + * + * `root_project_id` is required. + * `credential_id` is required. + * + * + */ + @Override + public void updateRepositoryCredential(UpdateRepositoryCredentialRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/projects/{root_project_id}/credential/{credential_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateRepositoryCredentialResponse.Builder responseBuilder = UpdateRepositoryCredentialResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Repository Credential for a remote dependency + * + * Admin required. + * + * `root_project_id` is required. + * `credential_id` is required. 
+ * + */ + @Override + public void deleteRepositoryCredential(DeleteRepositoryCredentialRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/projects/{root_project_id}/credential/{credential_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteRepositoryCredentialResponse.Builder responseBuilder = DeleteRepositoryCredentialResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get all Repository Credentials for a project + * + * `root_project_id` is required. + * + */ + @Override + public void getAllRepositoryCredentials(GetAllRepositoryCredentialsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{root_project_id}/credentials", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + GetAllRepositoryCredentialsResponse.Builder responseBuilder = GetAllRepositoryCredentialsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Project: Manage Projects + + //#region Query: Run and Manage Queries + + /** + * ### Create an async query task + * + * Creates a query task (job) to run a previously created query asynchronously. Returns a Query Task ID. + * + * Use [query_task(query_task_id)](#!/Query/query_task) to check the execution status of the query task. + * After the query task status reaches "Complete", use [query_task_results(query_task_id)](#!/Query/query_task_results) to fetch the results of the query. 
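+ * Roughly, via a hypothetical blocking stub (the setter and getter names below are assumptions;
+ * only the request/response message names appear in this file):
+ *
+ * <pre>{@code
+ * CreateQueryTaskResponse task = stub.createQueryTask(
+ *     CreateQueryTaskRequest.newBuilder().setQueryId(13).setResultFormat("json").build());
+ * QueryTaskResponse status = stub.queryTask(
+ *     QueryTaskRequest.newBuilder().setQueryTaskId(task.getId()).build());
+ * // ...poll until the status reports "Complete", then fetch the results:
+ * QueryTaskResultsResponse results = stub.queryTaskResults(
+ *     QueryTaskResultsRequest.newBuilder().setQueryTaskId(task.getId()).build());
+ * }</pre>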
+ * + */ + @Override + public void createQueryTask(CreateQueryTaskRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/query_tasks", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateQueryTaskResponse.Builder responseBuilder = CreateQueryTaskResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Fetch results of multiple async queries + * + * Returns the results of multiple async queries in one request. + * + * For Query Tasks that are not completed, the response will include the execution status of the Query Task but will not include query results. + * Query Tasks whose results have expired will have a status of 'expired'. + * If the user making the API request does not have sufficient privileges to view a Query Task result, the result will have a status of 'missing' + * + */ + @Override + public void queryTaskMultiResults(QueryTaskMultiResultsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/query_tasks/multi_results", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + QueryTaskMultiResultsResponse.Builder responseBuilder = QueryTaskMultiResultsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Query Task details + * + * Use this function to check the status of an async query task. After the status + * reaches "Complete", you can call [query_task_results(query_task_id)](#!/Query/query_task_results) to + * retrieve the results of the query. + * + * Use [create_query_task()](#!/Query/create_query_task) to create an async query task. 
+ * + */ + @Override + public void queryTask(QueryTaskRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/query_tasks/{query_task_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + QueryTaskResponse.Builder responseBuilder = QueryTaskResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Async Query Results + * + * Returns the results of an async query task if the query has completed. + * + * If the query task is still running or waiting to run, this function returns 204 No Content. + * + * If the query task ID is invalid or the cached results of the query task have expired, this function returns 404 Not Found. + * + * Use [query_task(query_task_id)](#!/Query/query_task) to check the execution status of the query task + * Call query_task_results only after the query task status reaches "Complete". + * + * You can also use [query_task_multi_results()](#!/Query/query_task_multi_results) retrieve the + * results of multiple async query tasks at the same time. + * + * #### SQL Error Handling: + * If the query fails due to a SQL db error, how this is communicated depends on the result_format you requested in `create_query_task()`. + * + * For `json_detail` result_format: `query_task_results()` will respond with HTTP status '200 OK' and db SQL error info + * will be in the `errors` property of the response object. The 'data' property will be empty. + * + * For all other result formats: `query_task_results()` will respond with HTTP status `400 Bad Request` and some db SQL error info + * will be in the message of the 400 error response, but not as detailed as expressed in `json_detail.errors`. + * These data formats can only carry row data, and error info is not row data. + * + */ + @Override + public void queryTaskResults(QueryTaskResultsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/query_tasks/{query_task_id}/results", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + QueryTaskResultsResponse.Builder responseBuilder = QueryTaskResultsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get a previously created query by id. 
+ * + * A Looker query object includes the various parameters that define a database query that has been run or + * could be run in the future. These parameters include: model, view, fields, filters, pivots, etc. + * Query *results* are not part of the query object. + * + * Query objects are unique and immutable. Query objects are created automatically in Looker as users explore data. + * Looker does not delete them; they become part of the query history. When asked to create a query for + * any given set of parameters, Looker will first try to find an existing query object with matching + * parameters and will only create a new object when an appropriate object can not be found. + * + * This 'get' method is used to get the details about a query for a given id. See the other methods here + * to 'create' and 'run' queries. + * + * Note that some fields like 'filter_config' and 'vis_config' etc are specific to how the Looker UI + * builds queries and visualizations and are not generally useful for API use. They are not required when + * creating new queries and can usually just be ignored. + * + * + */ + @Override + public void query(QueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/queries/{query_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + QueryResponse.Builder responseBuilder = QueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the query for a given query slug. + * + * This returns the query for the 'slug' in a query share URL. + * + * The 'slug' is a randomly chosen short string that is used as an alternative to the query's id value + * for use in URLs etc. This method exists as a convenience to help you use the API to 'find' queries that + * have been created using the Looker UI. + * + * You can use the Looker explore page to build a query and then choose the 'Share' option to + * show the share url for the query. Share urls generally look something like 'https://looker.yourcompany/x/vwGSbfc'. + * The trailing 'vwGSbfc' is the share slug. You can pass that string to this api method to get details about the query. + * Those details include the 'id' that you can use to run the query. Or, you can copy the query body + * (perhaps with your own modification) and use that as the basis to make/run new queries. + * + * This will also work with slugs from Looker explore urls like + * 'https://looker.yourcompany/explore/ecommerce/orders?qid=aogBgL6o3cKK1jN3RoZl5s'. In this case + * 'aogBgL6o3cKK1jN3RoZl5s' is the slug. 
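+ * For instance (hypothetical stub variable and field names — only the message names appear in
+ * this file):
+ *
+ * <pre>{@code
+ * // 'vwGSbfc' is the slug portion of a share URL like https://looker.yourcompany/x/vwGSbfc
+ * QueryForSlugResponse found = stub.queryForSlug(
+ *     QueryForSlugRequest.newBuilder().setSlug("vwGSbfc").build());
+ * RunQueryResponse run = stub.runQuery(RunQueryRequest.newBuilder()
+ *     .setQueryId(found.getId())
+ *     .setResultFormat("json")
+ *     .build());
+ * }</pre>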
+ * + */ + @Override + public void queryForSlug(QueryForSlugRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/queries/slug/{slug}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + QueryForSlugResponse.Builder responseBuilder = QueryForSlugResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a query. + * + * This allows you to create a new query that you can later run. Looker queries are immutable once created + * and are not deleted. If you create a query that is exactly like an existing query then the existing query + * will be returned and no new query will be created. Whether a new query is created or not, you can use + * the 'id' in the returned query with the 'run' method. + * + * The query parameters are passed as json in the body of the request. + * + * + */ + @Override + public void createQuery(CreateQueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/queries", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateQueryResponse.Builder responseBuilder = CreateQueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Run a saved query. + * + * This runs a previously saved query. You can use this on a query that was generated in the Looker UI + * or one that you have explicitly created using the API. You can also use a query 'id' from a saved 'Look'. + * + * The 'result_format' parameter specifies the desired structure and format of the response. 
+ * + * Supported formats: + * + * | result_format | Description + * | :-----------: | :--- | + * | json | Plain json + * | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + * | csv | Comma separated values with a header + * | txt | Tab separated values with a header + * | html | Simple html + * | md | Simple markdown + * | xlsx | MS Excel spreadsheet + * | sql | Returns the generated SQL rather than running the query + * | png | A PNG image of the visualization of the query + * | jpg | A JPG image of the visualization of the query + * + * + * + */ + @Override + public void runQuery(RunQueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/queries/{query_id}/run/{result_format}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RunQueryResponse.Builder responseBuilder = RunQueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Run the query that is specified inline in the posted body. + * + * This allows running a query as defined in json in the posted body. This combines + * the two actions of posting & running a query into one step. + * + * Here is an example body in json: + * ``` + * { + * "model":"thelook", + * "view":"inventory_items", + * "fields":["category.name","inventory_items.days_in_inventory_tier","products.count"], + * "filters":{"category.name":"socks"}, + * "sorts":["products.count desc 0"], + * "limit":"500", + * "query_timezone":"America/Los_Angeles" + * } + * ``` + * + * When using the Ruby SDK this would be passed as a Ruby hash like: + * ``` + * { + * :model=>"thelook", + * :view=>"inventory_items", + * :fields=> + * ["category.name", + * "inventory_items.days_in_inventory_tier", + * "products.count"], + * :filters=>{:"category.name"=>"socks"}, + * :sorts=>["products.count desc 0"], + * :limit=>"500", + * :query_timezone=>"America/Los_Angeles", + * } + * ``` + * + * This will return the result of running the query in the format specified by the 'result_format' parameter. 
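+ *
+ * Through this gRPC proxy the same example query could be expressed directly on the request
+ * message, for instance (an illustrative sketch; the nested `Query` message name and the setter
+ * names are assumptions derived from the JSON body above):
+ *
+ * ```java
+ * RunInlineQueryRequest request = RunInlineQueryRequest.newBuilder()
+ *     .setResultFormat("json")
+ *     .setBody(Query.newBuilder()
+ *         .setModel("thelook")
+ *         .setView("inventory_items")
+ *         .addFields("category.name")
+ *         .addFields("inventory_items.days_in_inventory_tier")
+ *         .addFields("products.count")
+ *         .setLimit("500"))
+ *     .build();
+ * ```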
+ * + * Supported formats: + * + * | result_format | Description + * | :-----------: | :--- | + * | json | Plain json + * | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + * | csv | Comma separated values with a header + * | txt | Tab separated values with a header + * | html | Simple html + * | md | Simple markdown + * | xlsx | MS Excel spreadsheet + * | sql | Returns the generated SQL rather than running the query + * | png | A PNG image of the visualization of the query + * | jpg | A JPG image of the visualization of the query + * + * + * + */ + @Override + public void runInlineQuery(RunInlineQueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/queries/run/{result_format}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RunInlineQueryResponse.Builder responseBuilder = RunInlineQueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Run an URL encoded query. + * + * This requires the caller to encode the specifiers for the query into the URL query part using + * Looker-specific syntax as explained below. + * + * Generally, you would want to use one of the methods that takes the parameters as json in the POST body + * for creating and/or running queries. This method exists for cases where one really needs to encode the + * parameters into the URL of a single 'GET' request. This matches the way that the Looker UI formats + * 'explore' URLs etc. + * + * The parameters here are very similar to the json body formatting except that the filter syntax is + * tricky. Unfortunately, this format makes this method not currently callable via the 'Try it out!' button + * in this documentation page. But, this is callable when creating URLs manually or when using the Looker SDK. + * + * Here is an example inline query URL: + * + * ``` + * https://looker.mycompany.com:19999/api/3.0/queries/models/thelook/views/inventory_items/run/json?fields=category.name,inventory_items.days_in_inventory_tier,products.count&f[category.name]=socks&sorts=products.count+desc+0&limit=500&query_timezone=America/Los_Angeles + * ``` + * + * When invoking this endpoint with the Ruby SDK, pass the query parameter parts as a hash. The hash to match the above would look like: + * + * ```ruby + * query_params = + * { + * :fields => "category.name,inventory_items.days_in_inventory_tier,products.count", + * :"f[category.name]" => "socks", + * :sorts => "products.count desc 0", + * :limit => "500", + * :query_timezone => "America/Los_Angeles" + * } + * response = ruby_sdk.run_url_encoded_query('thelook','inventory_items','json', query_params) + * + * ``` + * + * Again, it is generally easier to use the variant of this method that passes the full query in the POST body. + * This method is available for cases where other alternatives won't fit the need. 
+ * + * Supported formats: + * + * | result_format | Description + * | :-----------: | :--- | + * | json | Plain json + * | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + * | csv | Comma separated values with a header + * | txt | Tab separated values with a header + * | html | Simple html + * | md | Simple markdown + * | xlsx | MS Excel spreadsheet + * | sql | Returns the generated SQL rather than running the query + * | png | A PNG image of the visualization of the query + * | jpg | A JPG image of the visualization of the query + * + * + * + */ + @Override + public void runUrlEncodedQuery(RunUrlEncodedQueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/queries/models/{model_name}/views/{view_name}/run/{result_format}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RunUrlEncodedQueryResponse.Builder responseBuilder = RunUrlEncodedQueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Merge Query + * + * Returns a merge query object given its id. + * + */ + @Override + public void mergeQuery(MergeQueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/merge_queries/{merge_query_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + MergeQueryResponse.Builder responseBuilder = MergeQueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create Merge Query + * + * Creates a new merge query object. + * + * A merge query takes the results of one or more queries and combines (merges) the results + * according to field mapping definitions. The result is similar to a SQL left outer join. + * + * A merge query can merge results of queries from different SQL databases. + * + * The order that queries are defined in the source_queries array property is significant. The + * first query in the array defines the primary key into which the results of subsequent + * queries will be merged. 
+ * + * Like model/view query objects, merge queries are immutable and have structural identity - if + * you make a request to create a new merge query that is identical to an existing merge query, + * the existing merge query will be returned instead of creating a duplicate. Conversely, any + * change to the contents of a merge query will produce a new object with a new id. + * + */ + @Override + public void createMergeQuery(CreateMergeQueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/merge_queries", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateMergeQueryResponse.Builder responseBuilder = CreateMergeQueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Get information about all running queries. + * + */ + @Override + public void allRunningQueries(AllRunningQueriesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/running_queries", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllRunningQueriesResponse.Builder responseBuilder = AllRunningQueriesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Kill a query with a specific query_task_id. + * + */ + @Override + public void killQuery(KillQueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/running_queries/{query_task_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + KillQueryResponse.Builder responseBuilder = KillQueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Get a SQL Runner query. 
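+ *
+ * For example (a sketch only; the `slug` setter name is assumed from the REST path parameter
+ * `{slug}`, and the value shown is illustrative):
+ *
+ * ```java
+ * SqlQueryRequest request = SqlQueryRequest.newBuilder()
+ *     .setSlug("xa1b2c3")
+ *     .build();
+ * ```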
+ */ + @Override + public void sqlQuery(SqlQueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/sql_queries/{slug}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SqlQueryResponse.Builder responseBuilder = SqlQueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a SQL Runner Query + * + * Either the `connection_name` or `model_name` parameter MUST be provided. + * + */ + @Override + public void createSqlQuery(CreateSqlQueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/sql_queries", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateSqlQueryResponse.Builder responseBuilder = CreateSqlQueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Execute a SQL Runner query in a given result_format. + */ + @Override + public void runSqlQuery(RunSqlQueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/sql_queries/{slug}/run/{result_format}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RunSqlQueryResponse.Builder responseBuilder = RunSqlQueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Query: Run and Manage Queries + + //#region RenderTask: Manage Render Tasks + + /** + * ### Create a new task to render a look to an image. + * + * Returns a render task object. + * To check the status of a render task, pass the render_task.id to [Get Render Task](#!/RenderTask/get_render_task). 
+ * Once the render task is complete, you can download the resulting document or image using [Get Render Task Results](#!/RenderTask/get_render_task_results). + * + * + */ + @Override + public void createLookRenderTask(CreateLookRenderTaskRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/render_tasks/looks/{look_id}/{result_format}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateLookRenderTaskResponse.Builder responseBuilder = CreateLookRenderTaskResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a new task to render an existing query to an image. + * + * Returns a render task object. + * To check the status of a render task, pass the render_task.id to [Get Render Task](#!/RenderTask/get_render_task). + * Once the render task is complete, you can download the resulting document or image using [Get Render Task Results](#!/RenderTask/get_render_task_results). + * + * + */ + @Override + public void createQueryRenderTask(CreateQueryRenderTaskRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/render_tasks/queries/{query_id}/{result_format}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateQueryRenderTaskResponse.Builder responseBuilder = CreateQueryRenderTaskResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a new task to render a dashboard to a document or image. + * + * Returns a render task object. + * To check the status of a render task, pass the render_task.id to [Get Render Task](#!/RenderTask/get_render_task). + * Once the render task is complete, you can download the resulting document or image using [Get Render Task Results](#!/RenderTask/get_render_task_results). 
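+ *
+ * As an illustration, a request for a PDF render of a dashboard might look like this (a sketch
+ * only; the setter names are assumptions based on the REST parameters `dashboard_id`,
+ * `result_format`, `width`, and `height`):
+ *
+ * ```java
+ * CreateDashboardRenderTaskRequest request = CreateDashboardRenderTaskRequest.newBuilder()
+ *     .setDashboardId(1)
+ *     .setResultFormat("pdf")
+ *     .setWidth(1280)
+ *     .setHeight(720)
+ *     .build();
+ * ```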
+ * + * + */ + @Override + public void createDashboardRenderTask(CreateDashboardRenderTaskRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/render_tasks/dashboards/{dashboard_id}/{result_format}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateDashboardRenderTaskResponse.Builder responseBuilder = CreateDashboardRenderTaskResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a render task. + * + * Returns a render task object. + * To check the status of a render task, pass the render_task.id to [Get Render Task](#!/RenderTask/get_render_task). + * Once the render task is complete, you can download the resulting document or image using [Get Render Task Results](#!/RenderTask/get_render_task_results). + * + * + */ + @Override + public void renderTask(RenderTaskRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/render_tasks/{render_task_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RenderTaskResponse.Builder responseBuilder = RenderTaskResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the document or image produced by a completed render task. + * + * Note that the PDF or image result will be a binary blob in the HTTP response, as indicated by the + * Content-Type in the response headers. This may require specialized (or at least different) handling than text + * responses such as JSON. You may need to tell your HTTP client that the response is binary so that it does not + * attempt to parse the binary data as text. + * + * If the render task exists but has not finished rendering the results, the response HTTP status will be + * **202 Accepted**, the response body will be empty, and the response will have a Retry-After header indicating + * that the caller should repeat the request at a later time. + * + * Returns 404 if the render task cannot be found, if the cached result has expired, or if the caller + * does not have permission to view the results. + * + * For detailed information about the status of the render task, use [Render Task](#!/RenderTask/render_task). 
+ * Polling loops waiting for completion of a render task would be better served by polling **render_task(id)** until + * the task status reaches completion (or error) instead of polling **render_task_results(id)** alone. + * + */ + @Override + public void renderTaskResults(RenderTaskResultsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/render_tasks/{render_task_id}/results", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RenderTaskResultsResponse.Builder responseBuilder = RenderTaskResultsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion RenderTask: Manage Render Tasks + + //#region Role: Manage Roles + + /** + * ### Search model sets + * Returns all model set records that match the given search criteria. + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. 
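+ *
+ * For example, a search for model sets whose name starts with "dan" might be expressed as
+ * (an illustrative sketch; the setter names are assumed from the REST query parameters):
+ *
+ * ```java
+ * SearchModelSetsRequest request = SearchModelSetsRequest.newBuilder()
+ *     .setName("dan%")   // SQL LIKE style wildcard, matches "danger" and "Danzig"
+ *     .setLimit(25)
+ *     .build();
+ * ```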
+ * + * + */ + @Override + public void searchModelSets(SearchModelSetsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/model_sets/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchModelSetsResponse.Builder responseBuilder = SearchModelSetsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the model set with a specific id. + * + */ + @Override + public void modelSet(ModelSetRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/model_sets/{model_set_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ModelSetResponse.Builder responseBuilder = ModelSetResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update information about the model set with a specific id. + * + */ + @Override + public void updateModelSet(UpdateModelSetRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/model_sets/{model_set_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateModelSetResponse.Builder responseBuilder = UpdateModelSetResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete the model set with a specific id. 
+ * + */ + @Override + public void deleteModelSet(DeleteModelSetRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/model_sets/{model_set_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteModelSetResponse.Builder responseBuilder = DeleteModelSetResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all model sets. + * + */ + @Override + public void allModelSets(AllModelSetsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/model_sets", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllModelSetsResponse.Builder responseBuilder = AllModelSetsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a model set with the specified information. Model sets are used by Roles. + * + */ + @Override + public void createModelSet(CreateModelSetRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/model_sets", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateModelSetResponse.Builder responseBuilder = CreateModelSetResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get all supported permissions. 
+ * + */ + @Override + public void allPermissions(AllPermissionsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/permissions", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllPermissionsResponse.Builder responseBuilder = AllPermissionsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search permission sets + * Returns all permission set records that match the given search criteria. + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. + * + * + */ + @Override + public void searchPermissionSets(SearchPermissionSetsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/permission_sets/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchPermissionSetsResponse.Builder responseBuilder = SearchPermissionSetsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the permission set with a specific id. 
+ * + */ + @Override + public void permissionSet(PermissionSetRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/permission_sets/{permission_set_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + PermissionSetResponse.Builder responseBuilder = PermissionSetResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update information about the permission set with a specific id. + * + */ + @Override + public void updatePermissionSet(UpdatePermissionSetRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/permission_sets/{permission_set_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdatePermissionSetResponse.Builder responseBuilder = UpdatePermissionSetResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete the permission set with a specific id. + * + */ + @Override + public void deletePermissionSet(DeletePermissionSetRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/permission_sets/{permission_set_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeletePermissionSetResponse.Builder responseBuilder = DeletePermissionSetResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all permission sets. 
+ * + */ + @Override + public void allPermissionSets(AllPermissionSetsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/permission_sets", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllPermissionSetsResponse.Builder responseBuilder = AllPermissionSetsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a permission set with the specified information. Permission sets are used by Roles. + * + */ + @Override + public void createPermissionSet(CreatePermissionSetRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/permission_sets", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreatePermissionSetResponse.Builder responseBuilder = CreatePermissionSetResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all roles. + * + */ + @Override + public void allRoles(AllRolesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/roles", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllRolesResponse.Builder responseBuilder = AllRolesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a role with the specified information. 
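+ *
+ * For illustration, a create request might be built along these lines (a rough sketch; the
+ * nested `WriteRole` message name and the setter names are assumptions based on the REST
+ * request body, which carries `name`, `permission_set_id`, and `model_set_id`):
+ *
+ * ```java
+ * CreateRoleRequest request = CreateRoleRequest.newBuilder()
+ *     .setBody(WriteRole.newBuilder()
+ *         .setName("Data Scientist")
+ *         .setPermissionSetId(7)
+ *         .setModelSetId(3))
+ *     .build();
+ * ```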
+ * + */ + @Override + public void createRole(CreateRoleRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/roles", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateRoleResponse.Builder responseBuilder = CreateRoleResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search roles + * + * Returns all role records that match the given search criteria. + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. + * + * + */ + @Override + public void searchRoles(SearchRolesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/roles/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchRolesResponse.Builder responseBuilder = SearchRolesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the role with a specific id. 
+ * + */ + @Override + public void role(RoleRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/roles/{role_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RoleResponse.Builder responseBuilder = RoleResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update information about the role with a specific id. + * + */ + @Override + public void updateRole(UpdateRoleRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/roles/{role_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateRoleResponse.Builder responseBuilder = UpdateRoleResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete the role with a specific id. + * + */ + @Override + public void deleteRole(DeleteRoleRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/roles/{role_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteRoleResponse.Builder responseBuilder = DeleteRoleResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all the groups with the role that has a specific id. 
+ * + */ + @Override + public void roleGroups(RoleGroupsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/roles/{role_id}/groups", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RoleGroupsResponse.Builder responseBuilder = RoleGroupsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Set all groups for a role, removing all existing group associations from that role. + * + */ + @Override + public void setRoleGroups(SetRoleGroupsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/roles/{role_id}/groups", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SetRoleGroupsResponse.Builder responseBuilder = SetRoleGroupsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all the users with the role that has a specific id. + * + */ + @Override + public void roleUsers(RoleUsersRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/roles/{role_id}/users", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RoleUsersResponse.Builder responseBuilder = RoleUsersResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Set all the users of the role with a specific id. 
+ * + */ + @Override + public void setRoleUsers(SetRoleUsersRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/roles/{role_id}/users", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SetRoleUsersResponse.Builder responseBuilder = SetRoleUsersResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Role: Manage Roles + + //#region ScheduledPlan: Manage Scheduled Plans + + /** + * ### Get Scheduled Plans for a Space + * + * Returns scheduled plans owned by the caller for a given space id. + * + */ + @Override + public void scheduledPlansForSpace(ScheduledPlansForSpaceRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/scheduled_plans/space/{space_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ScheduledPlansForSpaceResponse.Builder responseBuilder = ScheduledPlansForSpaceResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Information About a Scheduled Plan + * + * Admins can fetch information about other users' Scheduled Plans. + * + */ + @Override + public void scheduledPlan(ScheduledPlanRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/scheduled_plans/{scheduled_plan_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ScheduledPlanResponse.Builder responseBuilder = ScheduledPlanResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a Scheduled Plan + * + * Admins can update other users' Scheduled Plans. 
+ * + * Note: Any scheduled plan destinations specified in an update will **replace** all scheduled plan destinations + * currently defined for the scheduled plan. + * + * For Example: If a scheduled plan has destinations A, B, and C, and you call update on this scheduled plan + * specifying only B in the destinations, then destinations A and C will be deleted by the update. + * + * Updating a scheduled plan to assign null or an empty array to the scheduled_plan_destinations property is an error, as a scheduled plan must always have at least one destination. + * + * If you omit the scheduled_plan_destinations property from the object passed to update, then the destinations + * defined on the original scheduled plan will remain unchanged. + * + * #### Email Permissions: + * + * For details about permissions required to schedule delivery to email and the safeguards + * Looker offers to protect against sending to unauthorized email destinations, see [Email Domain Whitelist for Scheduled Looks](https://docs.looker.com/r/api/embed-permissions). + * + * + * #### Scheduled Plan Destination Formats + * + * Scheduled plan destinations must specify the data format to produce and send to the destination. + * + * Formats: + * + * | format | Description + * | :-----------: | :--- | + * | json | A JSON object containing a `data` property which contains an array of JSON objects, one per row. No metadata. + * | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + * | inline_json | Same as the JSON format, except that the `data` property is a string containing JSON-escaped row data. Additional properties describe the data operation. This format is primarily used to send data to web hooks so that the web hook doesn't have to re-encode the JSON row data in order to pass it on to its ultimate destination. + * | csv | Comma separated values with a header + * | txt | Tab separated values with a header + * | html | Simple html + * | xlsx | MS Excel spreadsheet + * | wysiwyg_pdf | Dashboard rendered in a tiled layout to produce a PDF document + * | assembled_pdf | Dashboard rendered in a single column layout to produce a PDF document + * | wysiwyg_png | Dashboard rendered in a tiled layout to produce a PNG image + * || + * + * Valid formats vary by destination type and source object. `wysiwyg_pdf` is only valid for dashboards, for example. 
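+ *
+ * As an illustration, an update that renames a schedule while leaving its existing destinations
+ * untouched might look like this (a rough sketch; the nested `WriteScheduledPlan` message name
+ * and the setter names are assumptions based on the REST request body):
+ *
+ * ```java
+ * UpdateScheduledPlanRequest request = UpdateScheduledPlanRequest.newBuilder()
+ *     .setScheduledPlanId(42)
+ *     // no destinations are set here, so the existing destinations remain unchanged (see note above)
+ *     .setBody(WriteScheduledPlan.newBuilder()
+ *         .setName("Weekly revenue summary"))
+ *     .build();
+ * ```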
+ * + * + * + */ + @Override + public void updateScheduledPlan(UpdateScheduledPlanRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/scheduled_plans/{scheduled_plan_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateScheduledPlanResponse.Builder responseBuilder = UpdateScheduledPlanResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a Scheduled Plan + * + * Normal users can only delete their own scheduled plans. + * Admins can delete other users' scheduled plans. + * This delete cannot be undone. + * + */ + @Override + public void deleteScheduledPlan(DeleteScheduledPlanRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/scheduled_plans/{scheduled_plan_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteScheduledPlanResponse.Builder responseBuilder = DeleteScheduledPlanResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### List All Scheduled Plans + * + * Returns all scheduled plans which belong to the caller or given user. + * + * If no user_id is provided, this function returns the scheduled plans owned by the caller. + * + * + * To list all schedules for all users, pass `all_users=true`. + * + * + * The caller must have `see_schedules` permission to see other users' scheduled plans. 
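+   *
+   * For illustration only, assuming the generated request message exposes the `all_users`
+   * query parameter as a boolean field (the actual field set is defined by the .proto files
+   * added in this change):
+   *
+   * <pre>{@code
+   * AllScheduledPlansRequest req = AllScheduledPlansRequest.newBuilder()
+   *     .setAllUsers(true)
+   *     .build();
+   * }</pre>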
+ * + * + * + */ + @Override + public void allScheduledPlans(AllScheduledPlansRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/scheduled_plans", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllScheduledPlansResponse.Builder responseBuilder = AllScheduledPlansResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a Scheduled Plan + * + * Create a scheduled plan to render a Look or Dashboard on a recurring schedule. + * + * To create a scheduled plan, you MUST provide values for the following fields: + * `name` + * and + * `look_id`, `dashboard_id`, `lookml_dashboard_id`, or `query_id` + * and + * `cron_tab` or `datagroup` + * and + * at least one scheduled_plan_destination + * + * A scheduled plan MUST have at least one scheduled_plan_destination defined. + * + * When `look_id` is set, `require_no_results`, `require_results`, and `require_change` are all required. + * + * If `create_scheduled_plan` fails with a 422 error, be sure to look at the error messages in the response which will explain exactly what fields are missing or values that are incompatible. + * + * The queries that provide the data for the look or dashboard are run in the context of user account that owns the scheduled plan. + * + * When `run_as_recipient` is `false` or not specified, the queries that provide the data for the + * look or dashboard are run in the context of user account that owns the scheduled plan. + * + * When `run_as_recipient` is `true` and all the email recipients are Looker user accounts, the + * queries are run in the context of each recipient, so different recipients may see different + * data from the same scheduled render of a look or dashboard. For more details, see [Run As Recipient](https://looker.com/docs/r/admin/run-as-recipient). + * + * Admins can create and modify scheduled plans on behalf of other users by specifying a user id. + * Non-admin users may not create or modify scheduled plans by or for other users. + * + * #### Email Permissions: + * + * For details about permissions required to schedule delivery to email and the safeguards + * Looker offers to protect against sending to unauthorized email destinations, see [Email Domain Whitelist for Scheduled Looks](https://docs.looker.com/r/api/embed-permissions). + * + * + * #### Scheduled Plan Destination Formats + * + * Scheduled plan destinations must specify the data format to produce and send to the destination. + * + * Formats: + * + * | format | Description + * | :-----------: | :--- | + * | json | A JSON object containing a `data` property which contains an array of JSON objects, one per row. No metadata. 
+ * | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + * | inline_json | Same as the JSON format, except that the `data` property is a string containing JSON-escaped row data. Additional properties describe the data operation. This format is primarily used to send data to web hooks so that the web hook doesn't have to re-encode the JSON row data in order to pass it on to its ultimate destination. + * | csv | Comma separated values with a header + * | txt | Tab separated values with a header + * | html | Simple html + * | xlsx | MS Excel spreadsheet + * | wysiwyg_pdf | Dashboard rendered in a tiled layout to produce a PDF document + * | assembled_pdf | Dashboard rendered in a single column layout to produce a PDF document + * | wysiwyg_png | Dashboard rendered in a tiled layout to produce a PNG image + * || + * + * Valid formats vary by destination type and source object. `wysiwyg_pdf` is only valid for dashboards, for example. + * + * + * + */ + @Override + public void createScheduledPlan(CreateScheduledPlanRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/scheduled_plans", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateScheduledPlanResponse.Builder responseBuilder = CreateScheduledPlanResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Run a Scheduled Plan Immediately + * + * Create a scheduled plan that runs only once, and immediately. + * + * This can be useful for testing a Scheduled Plan before committing to a production schedule. + * + * Admins can create scheduled plans on behalf of other users by specifying a user id. + * + * This API is rate limited to prevent it from being used for relay spam or DoS attacks + * + * #### Email Permissions: + * + * For details about permissions required to schedule delivery to email and the safeguards + * Looker offers to protect against sending to unauthorized email destinations, see [Email Domain Whitelist for Scheduled Looks](https://docs.looker.com/r/api/embed-permissions). + * + * + * #### Scheduled Plan Destination Formats + * + * Scheduled plan destinations must specify the data format to produce and send to the destination. + * + * Formats: + * + * | format | Description + * | :-----------: | :--- | + * | json | A JSON object containing a `data` property which contains an array of JSON objects, one per row. No metadata. + * | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + * | inline_json | Same as the JSON format, except that the `data` property is a string containing JSON-escaped row data. Additional properties describe the data operation. 
This format is primarily used to send data to web hooks so that the web hook doesn't have to re-encode the JSON row data in order to pass it on to its ultimate destination. + * | csv | Comma separated values with a header + * | txt | Tab separated values with a header + * | html | Simple html + * | xlsx | MS Excel spreadsheet + * | wysiwyg_pdf | Dashboard rendered in a tiled layout to produce a PDF document + * | assembled_pdf | Dashboard rendered in a single column layout to produce a PDF document + * | wysiwyg_png | Dashboard rendered in a tiled layout to produce a PNG image + * || + * + * Valid formats vary by destination type and source object. `wysiwyg_pdf` is only valid for dashboards, for example. + * + * + * + */ + @Override + public void scheduledPlanRunOnce(ScheduledPlanRunOnceRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/scheduled_plans/run_once", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ScheduledPlanRunOnceResponse.Builder responseBuilder = ScheduledPlanRunOnceResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Scheduled Plans for a Look + * + * Returns all scheduled plans for a look which belong to the caller or given user. + * + * If no user_id is provided, this function returns the scheduled plans owned by the caller. + * + * + * To list all schedules for all users, pass `all_users=true`. + * + * + * The caller must have `see_schedules` permission to see other users' scheduled plans. + * + * + * + */ + @Override + public void scheduledPlansForLook(ScheduledPlansForLookRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/scheduled_plans/look/{look_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ScheduledPlansForLookResponse.Builder responseBuilder = ScheduledPlansForLookResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Scheduled Plans for a Dashboard + * + * Returns all scheduled plans for a dashboard which belong to the caller or given user. + * + * If no user_id is provided, this function returns the scheduled plans owned by the caller. + * + * + * To list all schedules for all users, pass `all_users=true`. 
+ * + * + * The caller must have `see_schedules` permission to see other users' scheduled plans. + * + * + * + */ + @Override + public void scheduledPlansForDashboard(ScheduledPlansForDashboardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/scheduled_plans/dashboard/{dashboard_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ScheduledPlansForDashboardResponse.Builder responseBuilder = ScheduledPlansForDashboardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Scheduled Plans for a LookML Dashboard + * + * Returns all scheduled plans for a LookML Dashboard which belong to the caller or given user. + * + * If no user_id is provided, this function returns the scheduled plans owned by the caller. + * + * + * To list all schedules for all users, pass `all_users=true`. + * + * + * The caller must have `see_schedules` permission to see other users' scheduled plans. + * + * + * + */ + @Override + public void scheduledPlansForLookmlDashboard(ScheduledPlansForLookmlDashboardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/scheduled_plans/lookml_dashboard/{lookml_dashboard_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ScheduledPlansForLookmlDashboardResponse.Builder responseBuilder = ScheduledPlansForLookmlDashboardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Run a Scheduled Plan By Id Immediately + * This function creates a run-once schedule plan based on an existing scheduled plan, + * applies modifications (if any) to the new scheduled plan, and runs the new schedule plan immediately. + * This can be useful for testing modifications to an existing scheduled plan before committing to a production schedule. + * + * This function internally performs the following operations: + * + * 1. Copies the properties of the existing scheduled plan into a new scheduled plan + * 2. Copies any properties passed in the JSON body of this request into the new scheduled plan (replacing the original values) + * 3. Creates the new scheduled plan + * 4. Runs the new scheduled plan + * + * The original scheduled plan is not modified by this operation. 
+ * Admins can create, modify, and run scheduled plans on behalf of other users by specifying a user id. + * Non-admins can only create, modify, and run their own scheduled plans. + * + * #### Email Permissions: + * + * For details about permissions required to schedule delivery to email and the safeguards + * Looker offers to protect against sending to unauthorized email destinations, see [Email Domain Whitelist for Scheduled Looks](https://docs.looker.com/r/api/embed-permissions). + * + * + * #### Scheduled Plan Destination Formats + * + * Scheduled plan destinations must specify the data format to produce and send to the destination. + * + * Formats: + * + * | format | Description + * | :-----------: | :--- | + * | json | A JSON object containing a `data` property which contains an array of JSON objects, one per row. No metadata. + * | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + * | inline_json | Same as the JSON format, except that the `data` property is a string containing JSON-escaped row data. Additional properties describe the data operation. This format is primarily used to send data to web hooks so that the web hook doesn't have to re-encode the JSON row data in order to pass it on to its ultimate destination. + * | csv | Comma separated values with a header + * | txt | Tab separated values with a header + * | html | Simple html + * | xlsx | MS Excel spreadsheet + * | wysiwyg_pdf | Dashboard rendered in a tiled layout to produce a PDF document + * | assembled_pdf | Dashboard rendered in a single column layout to produce a PDF document + * | wysiwyg_png | Dashboard rendered in a tiled layout to produce a PNG image + * || + * + * Valid formats vary by destination type and source object. `wysiwyg_pdf` is only valid for dashboards, for example. + * + * + * + * This API is rate limited to prevent it from being used for relay spam or DoS attacks + * + * + */ + @Override + public void scheduledPlanRunOnceById(ScheduledPlanRunOnceByIdRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/scheduled_plans/{scheduled_plan_id}/run_once", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ScheduledPlanRunOnceByIdResponse.Builder responseBuilder = ScheduledPlanRunOnceByIdResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion ScheduledPlan: Manage Scheduled Plans + + //#region Session: Session Information + + /** + * ### Get API Session + * + * Returns information about the current API session, such as which workspace is selected for the session. 
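+   *
+   * A minimal illustrative call through a hypothetical generated blocking stub (the stub
+   * is not part of this change):
+   *
+   * <pre>{@code
+   * SessionResponse session = stub.session(SessionRequest.newBuilder().build());
+   * }</pre>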
+ * + */ + @Override + public void session(SessionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/session", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SessionResponse.Builder responseBuilder = SessionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update API Session + * + * #### API Session Workspace + * + * You can use this endpoint to change the active workspace for the current API session. + * + * Only one workspace can be active in a session. The active workspace can be changed + * any number of times in a session. + * + * The default workspace for API sessions is the "production" workspace. + * + * All Looker APIs that use projects or lookml models (such as running queries) will + * use the version of project and model files defined by this workspace for the lifetime of the + * current API session or until the session workspace is changed again. + * + * An API session has the same lifetime as the access_token used to authenticate API requests. Each successful + * API login generates a new access_token and a new API session. + * + * If your Looker API client application needs to work in a dev workspace across multiple + * API sessions, be sure to select the dev workspace after each login. + * + */ + @Override + public void updateSession(UpdateSessionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/session", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateSessionResponse.Builder responseBuilder = UpdateSessionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Session: Session Information + + //#region Theme: Manage Themes + + /** + * ### Get an array of all existing themes + * + * Get a **single theme** by id with [Theme](#!/Theme/theme) + * + * This method returns an array of all existing themes. The active time for the theme is not considered. + * + * **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. 
+ * + * + */ + @Override + public void allThemes(AllThemesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/themes", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllThemesResponse.Builder responseBuilder = AllThemesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a theme + * + * Creates a new theme object, returning the theme details, including the created id. + * + * If `settings` are not specified, the default theme settings will be copied into the new theme. + * + * The theme `name` can only contain alphanumeric characters or underscores. Theme names should not contain any confidential information, such as customer names. + * + * **Update** an existing theme with [Update Theme](#!/Theme/update_theme) + * + * **Permanently delete** an existing theme with [Delete Theme](#!/Theme/delete_theme) + * + * For more information, see [Creating and Applying Themes](https://looker.com/docs/r/admin/themes). + * + * **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + * + * + */ + @Override + public void createTheme(CreateThemeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/themes", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateThemeResponse.Builder responseBuilder = CreateThemeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search all themes for matching criteria. + * + * Returns an **array of theme objects** that match the specified search criteria. + * + * | Search Parameters | Description + * | :-------------------: | :------ | + * | `begin_at` only | Find themes active at or after `begin_at` + * | `end_at` only | Find themes active at or before `end_at` + * | both set | Find themes with an active inclusive period between `begin_at` and `end_at` + * + * Note: Range matching requires boolean AND logic. 
+ * When using `begin_at` and `end_at` together, do not use `filter_or`=TRUE + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. + * + * + * Get a **single theme** by id with [Theme](#!/Theme/theme) + * + * **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + * + * + */ + @Override + public void searchThemes(SearchThemesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/themes/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchThemesResponse.Builder responseBuilder = SearchThemesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the default theme + * + * Returns the active theme object set as the default. + * + * The **default** theme name can be set in the UI on the Admin|Theme UI page + * + * The optional `ts` parameter can specify a different timestamp than "now." If specified, it returns the default theme at the time indicated. 
+ * + */ + @Override + public void defaultTheme(DefaultThemeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/themes/default", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DefaultThemeResponse.Builder responseBuilder = DefaultThemeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Set the global default theme by theme name + * + * Only Admin users can call this function. + * + * Only an active theme with no expiration (`end_at` not set) can be assigned as the default theme. As long as a theme has an active record with no expiration, it can be set as the default. + * + * [Create Theme](#!/Theme/create) has detailed information on rules for default and active themes + * + * Returns the new specified default theme object. + * + * **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + * + * + */ + @Override + public void setDefaultTheme(SetDefaultThemeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/themes/default", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SetDefaultThemeResponse.Builder responseBuilder = SetDefaultThemeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get active themes + * + * Returns an array of active themes. + * + * If the `name` parameter is specified, it will return an array with one theme if it's active and found. + * + * The optional `ts` parameter can specify a different timestamp than "now." + * + * **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. 
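+   *
+   * As with every method in this proxy, a failing Looker REST call is surfaced to the caller
+   * as a gRPC error status, and malformed JSON/protobuf data is reported as
+   * Status.INVALID_ARGUMENT. An illustrative client-side guard (hypothetical stub, not part
+   * of this change):
+   *
+   * <pre>{@code
+   * try {
+   *   ActiveThemesResponse themes = stub.activeThemes(ActiveThemesRequest.newBuilder().build());
+   * } catch (io.grpc.StatusRuntimeException e) {
+   *   // e.getStatus() carries the status mapped from the Looker response
+   * }
+   * }</pre>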
+ * + * + * + */ + @Override + public void activeThemes(ActiveThemesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/themes/active", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ActiveThemesResponse.Builder responseBuilder = ActiveThemesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the named theme if it's active. Otherwise, return the default theme + * + * The optional `ts` parameter can specify a different timestamp than "now." + * Note: API users with `show` ability can call this function + * + * **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + * + * + */ + @Override + public void themeOrDefault(ThemeOrDefaultRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/themes/theme_or_default", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ThemeOrDefaultResponse.Builder responseBuilder = ThemeOrDefaultResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Validate a theme with the specified information + * + * Validates all values set for the theme, returning any errors encountered, or 200 OK if valid + * + * See [Create Theme](#!/Theme/create_theme) for constraints + * + * **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. 
+ * + * + */ + @Override + public void validateTheme(ValidateThemeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/themes/validate", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ValidateThemeResponse.Builder responseBuilder = ValidateThemeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get a theme by ID + * + * Use this to retrieve a specific theme, whether or not it's currently active. + * + * **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + * + * + */ + @Override + public void theme(ThemeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/themes/{theme_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ThemeResponse.Builder responseBuilder = ThemeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the theme by id. + * + * **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. 
+ * + * + */ + @Override + public void updateTheme(UpdateThemeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/themes/{theme_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateThemeResponse.Builder responseBuilder = UpdateThemeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a specific theme by id + * + * This operation permanently deletes the identified theme from the database. + * + * Because multiple themes can have the same name (with different activation time spans) themes can only be deleted by ID. + * + * All IDs associated with a theme name can be retrieved by searching for the theme name with [Theme Search](#!/Theme/search). + * + * **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + * + * + */ + @Override + public void deleteTheme(DeleteThemeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/themes/{theme_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteThemeResponse.Builder responseBuilder = DeleteThemeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Theme: Manage Themes + + //#region User: Manage Users + + /** + * ### Get information about the current user; i.e. the user account currently calling the API. 
+ * + */ + @Override + public void me(MeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/user", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + MeResponse.Builder responseBuilder = MeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all users. + * + */ + @Override + public void allUsers(AllUsersRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllUsersResponse.Builder responseBuilder = AllUsersResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a user with the specified information. + * + */ + @Override + public void createUser(CreateUserRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/users", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateUserResponse.Builder responseBuilder = CreateUserResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search users + * + * Returns all* user records that match the given search criteria. + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. 
+ * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. + * + * + * (*) Results are always filtered to the level of information the caller is permitted to view. + * Looker admins can see all user details; normal users in an open system can see + * names of other users but no details; normal users in a closed system can only see + * names of other users who are members of the same group as the user. + * + * + */ + @Override + public void searchUsers(SearchUsersRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchUsersResponse.Builder responseBuilder = SearchUsersResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search for user accounts by name + * + * Returns all user accounts where `first_name` OR `last_name` OR `email` field values match a pattern. + * The pattern can contain `%` and `_` wildcards as in SQL LIKE expressions. + * + * Any additional search params will be combined into a logical AND expression. + * + */ + @Override + public void searchUsersNames(SearchUsersNamesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/search/names/{pattern}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchUsersNamesResponse.Builder responseBuilder = SearchUsersNamesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the user with a specific id. + * + * If the caller is an admin or the caller is the user being specified, then full user information will + * be returned. 
Otherwise, a minimal 'public' variant of the user information will be returned. This contains + * The user name and avatar url, but no sensitive information. + * + */ + @Override + public void user(UserRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserResponse.Builder responseBuilder = UserResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update information about the user with a specific id. + * + */ + @Override + public void updateUser(UpdateUserRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/users/{user_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateUserResponse.Builder responseBuilder = UpdateUserResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete the user with a specific id. + * + * **DANGER** this will delete the user and all looks and other information owned by the user. + * + */ + @Override + public void deleteUser(DeleteUserRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserResponse.Builder responseBuilder = DeleteUserResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the user with a credential of given type with specific id. + * + * This is used to do things like find users by their embed external_user_id. Or, find the user with + * a given api3 client_id, etc. 
The 'credential_type' matchs the 'type' name of the various credential + * types. It must be one of the values listed in the table below. The 'credential_id' is your unique Id + * for the user and is specific to each type of credential. + * + * An example using the Ruby sdk might look like: + * + * `sdk.user_for_credential('embed', 'customer-4959425')` + * + * This table shows the supported 'Credential Type' strings. The right column is for reference; it shows + * which field in the given credential type is actually searched when finding a user with the supplied + * 'credential_id'. + * + * | Credential Types | Id Field Matched | + * | ---------------- | ---------------- | + * | email | email | + * | google | google_user_id | + * | saml | saml_user_id | + * | oidc | oidc_user_id | + * | ldap | ldap_id | + * | api | token | + * | api3 | client_id | + * | embed | external_user_id | + * | looker_openid | email | + * + * NOTE: The 'api' credential type was only used with the legacy Looker query API and is no longer supported. The credential type for API you are currently looking at is 'api3'. + * + * + */ + @Override + public void userForCredential(UserForCredentialRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/credential/{credential_type}/{credential_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserForCredentialResponse.Builder responseBuilder = UserForCredentialResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Email/password login information for the specified user. + */ + @Override + public void userCredentialsEmail(UserCredentialsEmailRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_email", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserCredentialsEmailResponse.Builder responseBuilder = UserCredentialsEmailResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Email/password login information for the specified user. 
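+   *
+   * The {user_id} placeholder in the REST path used by the proxy below
+   * ("/users/{user_id}/credentials_email") is not substituted in this method; it is
+   * presumably resolved by the LookerClient helper from the user_id field of the
+   * JSON-serialized request.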
+ */ + @Override + public void createUserCredentialsEmail(CreateUserCredentialsEmailRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/users/{user_id}/credentials_email", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateUserCredentialsEmailResponse.Builder responseBuilder = CreateUserCredentialsEmailResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Email/password login information for the specified user. + */ + @Override + public void updateUserCredentialsEmail(UpdateUserCredentialsEmailRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/users/{user_id}/credentials_email", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateUserCredentialsEmailResponse.Builder responseBuilder = UpdateUserCredentialsEmailResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Email/password login information for the specified user. + */ + @Override + public void deleteUserCredentialsEmail(DeleteUserCredentialsEmailRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/credentials_email", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserCredentialsEmailResponse.Builder responseBuilder = DeleteUserCredentialsEmailResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Two-factor login information for the specified user. 
+ */ + @Override + public void userCredentialsTotp(UserCredentialsTotpRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_totp", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserCredentialsTotpResponse.Builder responseBuilder = UserCredentialsTotpResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Two-factor login information for the specified user. + */ + @Override + public void createUserCredentialsTotp(CreateUserCredentialsTotpRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/users/{user_id}/credentials_totp", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateUserCredentialsTotpResponse.Builder responseBuilder = CreateUserCredentialsTotpResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Two-factor login information for the specified user. + */ + @Override + public void deleteUserCredentialsTotp(DeleteUserCredentialsTotpRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/credentials_totp", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserCredentialsTotpResponse.Builder responseBuilder = DeleteUserCredentialsTotpResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### LDAP login information for the specified user. 
+ */ + @Override + public void userCredentialsLdap(UserCredentialsLdapRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_ldap", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserCredentialsLdapResponse.Builder responseBuilder = UserCredentialsLdapResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### LDAP login information for the specified user. + */ + @Override + public void deleteUserCredentialsLdap(DeleteUserCredentialsLdapRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/credentials_ldap", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserCredentialsLdapResponse.Builder responseBuilder = DeleteUserCredentialsLdapResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Google authentication login information for the specified user. + */ + @Override + public void userCredentialsGoogle(UserCredentialsGoogleRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_google", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserCredentialsGoogleResponse.Builder responseBuilder = UserCredentialsGoogleResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Google authentication login information for the specified user. 
+ */ + @Override + public void deleteUserCredentialsGoogle(DeleteUserCredentialsGoogleRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/credentials_google", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserCredentialsGoogleResponse.Builder responseBuilder = DeleteUserCredentialsGoogleResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Saml authentication login information for the specified user. + */ + @Override + public void userCredentialsSaml(UserCredentialsSamlRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_saml", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserCredentialsSamlResponse.Builder responseBuilder = UserCredentialsSamlResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Saml authentication login information for the specified user. + */ + @Override + public void deleteUserCredentialsSaml(DeleteUserCredentialsSamlRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/credentials_saml", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserCredentialsSamlResponse.Builder responseBuilder = DeleteUserCredentialsSamlResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### OpenID Connect (OIDC) authentication login information for the specified user. 
+ */ + @Override + public void userCredentialsOidc(UserCredentialsOidcRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_oidc", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserCredentialsOidcResponse.Builder responseBuilder = UserCredentialsOidcResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### OpenID Connect (OIDC) authentication login information for the specified user. + */ + @Override + public void deleteUserCredentialsOidc(DeleteUserCredentialsOidcRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/credentials_oidc", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserCredentialsOidcResponse.Builder responseBuilder = DeleteUserCredentialsOidcResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### API 3 login information for the specified user. This is for the newer API keys that can be added for any user. + */ + @Override + public void userCredentialsApi3(UserCredentialsApi3Request request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_api3/{credentials_api3_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserCredentialsApi3Response.Builder responseBuilder = UserCredentialsApi3Response.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### API 3 login information for the specified user. This is for the newer API keys that can be added for any user. 
+ */ + @Override + public void deleteUserCredentialsApi3(DeleteUserCredentialsApi3Request request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/credentials_api3/{credentials_api3_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserCredentialsApi3Response.Builder responseBuilder = DeleteUserCredentialsApi3Response.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### API 3 login information for the specified user. This is for the newer API keys that can be added for any user. + */ + @Override + public void allUserCredentialsApi3s(AllUserCredentialsApi3sRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_api3", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllUserCredentialsApi3sResponse.Builder responseBuilder = AllUserCredentialsApi3sResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### API 3 login information for the specified user. This is for the newer API keys that can be added for any user. + */ + @Override + public void createUserCredentialsApi3(CreateUserCredentialsApi3Request request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/users/{user_id}/credentials_api3", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateUserCredentialsApi3Response.Builder responseBuilder = CreateUserCredentialsApi3Response.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Embed login information for the specified user. 
+ */ + @Override + public void userCredentialsEmbed(UserCredentialsEmbedRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_embed/{credentials_embed_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserCredentialsEmbedResponse.Builder responseBuilder = UserCredentialsEmbedResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Embed login information for the specified user. + */ + @Override + public void deleteUserCredentialsEmbed(DeleteUserCredentialsEmbedRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/credentials_embed/{credentials_embed_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserCredentialsEmbedResponse.Builder responseBuilder = DeleteUserCredentialsEmbedResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Embed login information for the specified user. + */ + @Override + public void allUserCredentialsEmbeds(AllUserCredentialsEmbedsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_embed", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllUserCredentialsEmbedsResponse.Builder responseBuilder = AllUserCredentialsEmbedsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Looker Openid login information for the specified user. Used by Looker Analysts. 
+ */ + @Override + public void userCredentialsLookerOpenid(UserCredentialsLookerOpenidRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_looker_openid", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserCredentialsLookerOpenidResponse.Builder responseBuilder = UserCredentialsLookerOpenidResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Looker Openid login information for the specified user. Used by Looker Analysts. + */ + @Override + public void deleteUserCredentialsLookerOpenid(DeleteUserCredentialsLookerOpenidRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/credentials_looker_openid", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserCredentialsLookerOpenidResponse.Builder responseBuilder = DeleteUserCredentialsLookerOpenidResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Web login session for the specified user. + */ + @Override + public void userSession(UserSessionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/sessions/{session_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserSessionResponse.Builder responseBuilder = UserSessionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Web login session for the specified user. 
+ */ + @Override + public void deleteUserSession(DeleteUserSessionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/sessions/{session_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserSessionResponse.Builder responseBuilder = DeleteUserSessionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Web login session for the specified user. + */ + @Override + public void allUserSessions(AllUserSessionsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/sessions", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllUserSessionsResponse.Builder responseBuilder = AllUserSessionsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a password reset token. + * This will create a cryptographically secure random password reset token for the user. + * If the user already has a password reset token then this invalidates the old token and creates a new one. + * The token is expressed as the 'password_reset_url' of the user's email/password credential object. + * This takes an optional 'expires' param to indicate if the new token should be an expiring token. + * Tokens that expire are typically used for self-service password resets for existing users. + * Invitation emails for new users typically are not set to expire. + * The expire period is always 60 minutes when expires is enabled. + * This method can be called with an empty body. 
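      // An illustrative call for the behavior described above: create a non-expiring reset token,
      // as typically used for a new-user invitation email. Here sdk is the blocking stub from the
      // earlier sketch, and the expires setter name is an assumption mirroring the REST parameter.
      CreateUserCredentialsEmailPasswordResetResponse reset = sdk.createUserCredentialsEmailPasswordReset(
          CreateUserCredentialsEmailPasswordResetRequest.newBuilder()
              .setUserId(42)
              .setExpires(false)   // set true for an expiring, self-service reset token (60 minutes, per the note above)
              .build());
      // The resulting URL is surfaced as password_reset_url on the user's email/password credential.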
+ * + */ + @Override + public void createUserCredentialsEmailPasswordReset(CreateUserCredentialsEmailPasswordResetRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/users/{user_id}/credentials_email/password_reset", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateUserCredentialsEmailPasswordResetResponse.Builder responseBuilder = CreateUserCredentialsEmailPasswordResetResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about roles of a given user + * + */ + @Override + public void userRoles(UserRolesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/roles", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserRolesResponse.Builder responseBuilder = UserRolesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Set roles of the user with a specific id. + * + */ + @Override + public void setUserRoles(SetUserRolesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/users/{user_id}/roles", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SetUserRolesResponse.Builder responseBuilder = SetUserRolesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get user attribute values for a given user. + * + * Returns the values of specified user attributes (or all user attributes) for a certain user. + * + * A value for each user attribute is searched for in the following locations, in this order: + * + * 1. in the user's account information + * 1. 
in groups that the user is a member of + * 1. the default value of the user attribute + * + * If more than one group has a value defined for a user attribute, the group with the lowest rank wins. + * + * The response will only include user attributes for which values were found. Use `include_unset=true` to include + * empty records for user attributes with no value. + * + * The value of all hidden user attributes will be blank. + * + */ + @Override + public void userAttributeUserValues(UserAttributeUserValuesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/attribute_values", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserAttributeUserValuesResponse.Builder responseBuilder = UserAttributeUserValuesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Store a custom value for a user attribute in a user's account settings. + * + * Per-user user attribute values take precedence over group or default values. + * + */ + @Override + public void setUserAttributeUserValue(SetUserAttributeUserValueRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/users/{user_id}/attribute_values/{user_attribute_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SetUserAttributeUserValueResponse.Builder responseBuilder = SetUserAttributeUserValueResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a user attribute value from a user's account settings. + * + * After the user attribute value is deleted from the user's account settings, subsequent requests + * for the user attribute value for this user will draw from the user's groups or the default + * value of the user attribute. See [Get User Attribute Values](#!/User/user_attribute_user_values) for more + * information about how user attribute values are resolved. 
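      // Illustrative sketch of the lookup described above: fetch the effective attribute values for
      // user 42, including attributes that currently have no value. sdk is the stub from the earlier
      // sketch; the include_unset setter name is an assumption mirroring the REST query parameter.
      UserAttributeUserValuesResponse values = sdk.userAttributeUserValues(
          UserAttributeUserValuesRequest.newBuilder()
              .setUserId(42)
              .setIncludeUnset(true)   // also return empty records for attributes with no value
              .build());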
+ * + */ + @Override + public void deleteUserAttributeUserValue(DeleteUserAttributeUserValueRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/attribute_values/{user_attribute_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserAttributeUserValueResponse.Builder responseBuilder = DeleteUserAttributeUserValueResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Send a password reset token. + * This will send a password reset email to the user. If a password reset token does not already exist + * for this user, it will create one and then send it. + * If the user has not yet set up their account, it will send a setup email to the user. + * The URL sent in the email is expressed as the 'password_reset_url' of the user's email/password credential object. + * Password reset URLs will expire in 60 minutes. + * This method can be called with an empty body. + * + */ + @Override + public void sendUserCredentialsEmailPasswordReset(SendUserCredentialsEmailPasswordResetRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/users/{user_id}/credentials_email/send_password_reset", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SendUserCredentialsEmailPasswordResetResponse.Builder responseBuilder = SendUserCredentialsEmailPasswordResetResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion User: Manage Users + + //#region UserAttribute: Manage User Attributes + + /** + * ### Get information about all user attributes. 
+ * + */ + @Override + public void allUserAttributes(AllUserAttributesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/user_attributes", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllUserAttributesResponse.Builder responseBuilder = AllUserAttributesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a new user attribute + * + * Permission information for a user attribute is conveyed through the `can` and `user_can_edit` fields. + * The `user_can_edit` field indicates whether an attribute is user-editable _anywhere_ in the application. + * The `can` field gives more granular access information, with the `set_value` child field indicating whether + * an attribute's value can be set by [Setting the User Attribute User Value](#!/User/set_user_attribute_user_value). + * + * Note: `name` and `label` fields must be unique across all user attributes in the Looker instance. + * Attempting to create a new user attribute with a name or label that duplicates an existing + * user attribute will fail with a 422 error. + * + */ + @Override + public void createUserAttribute(CreateUserAttributeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/user_attributes", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateUserAttributeResponse.Builder responseBuilder = CreateUserAttributeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a user attribute. 
+ * + */ + @Override + public void userAttribute(UserAttributeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/user_attributes/{user_attribute_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserAttributeResponse.Builder responseBuilder = UserAttributeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a user attribute definition. + * + */ + @Override + public void updateUserAttribute(UpdateUserAttributeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/user_attributes/{user_attribute_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateUserAttributeResponse.Builder responseBuilder = UpdateUserAttributeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a user attribute (admin only). + * + */ + @Override + public void deleteUserAttribute(DeleteUserAttributeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/user_attributes/{user_attribute_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserAttributeResponse.Builder responseBuilder = DeleteUserAttributeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Returns all values of a user attribute defined by user groups, in precedence order. + * + * A user may be a member of multiple groups which define different values for a given user attribute. + * The order of group-values in the response determines precedence for selecting which group-value applies + * to a given user. 
For more information, see [Set User Attribute Group Values](#!/UserAttribute/set_user_attribute_group_values). + * + * Results will only include groups that the caller's user account has permission to see. + * + */ + @Override + public void allUserAttributeGroupValues(AllUserAttributeGroupValuesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/user_attributes/{user_attribute_id}/group_values", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllUserAttributeGroupValuesResponse.Builder responseBuilder = AllUserAttributeGroupValuesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Define values for a user attribute across a set of groups, in priority order. + * + * This function defines all values for a user attribute defined by user groups. This is a global setting, potentially affecting + * all users in the system. This function replaces any existing group value definitions for the indicated user attribute. + * + * The value of a user attribute for a given user is determined by searching the following locations, in this order: + * + * 1. the user's account settings + * 2. the groups that the user is a member of + * 3. the default value of the user attribute, if any + * + * The user may be a member of multiple groups which define different values for that user attribute. The order of items in the group_values parameter + * determines which group takes priority for that user. Lowest array index wins. + * + * An alternate method to indicate the selection precedence of group-values is to assign numbers to the 'rank' property of each + * group-value object in the array. Lowest 'rank' value wins. If you use this technique, you must assign a + * rank value to every group-value object in the array. + * + * To set a user attribute value for a single user, see [Set User Attribute User Value](#!/User/set_user_attribute_user_value). + * To set a user attribute value for all members of a group, see [Set User Attribute Group Value](#!/Group/update_user_attribute_group_value). 
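      // Illustrative sketch of the ordering rule described above: two group values where the first
      // entry (equivalently, the lowest rank) wins for users belonging to both groups. The shape of
      // the repeated field and the UserAttributeGroupValue message are assumptions about the
      // generated protos, not taken from this file.
      SetUserAttributeGroupValuesResponse ordered = sdk.setUserAttributeGroupValues(
          SetUserAttributeGroupValuesRequest.newBuilder()
              .setUserAttributeId(17)
              .addGroupValues(UserAttributeGroupValue.newBuilder().setGroupId(3).setValue("emea"))
              .addGroupValues(UserAttributeGroupValue.newBuilder().setGroupId(9).setValue("global"))
              .build());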
+ * + */ + @Override + public void setUserAttributeGroupValues(SetUserAttributeGroupValuesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/user_attributes/{user_attribute_id}/group_values", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SetUserAttributeGroupValuesResponse.Builder responseBuilder = SetUserAttributeGroupValuesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion UserAttribute: Manage User Attributes + + //#region Workspace: Manage Workspaces + + /** + * ### Get All Workspaces + * + * Returns all workspaces available to the calling user. + * + */ + @Override + public void allWorkspaces(AllWorkspacesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/workspaces", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllWorkspacesResponse.Builder responseBuilder = AllWorkspacesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get A Workspace + * + * Returns information about a workspace such as the git status and selected branches + * of all projects available to the caller's user account. + * + * A workspace defines which versions of project files will be used to evaluate expressions + * and operations that use model definitions - operations such as running queries or rendering dashboards. + * Each project has its own git repository, and each project in a workspace may be configured to reference + * particular branch or revision within their respective repositories. + * + * There are two predefined workspaces available: "production" and "dev". + * + * The production workspace is shared across all Looker users. Models in the production workspace are read-only. + * Changing files in production is accomplished by modifying files in a git branch and using Pull Requests + * to merge the changes from the dev branch into the production branch, and then telling + * Looker to sync with production. + * + * The dev workspace is local to each Looker user. Changes made to project/model files in the dev workspace only affect + * that user, and only when the dev workspace is selected as the active workspace for the API session. + * (See set_session_workspace()). 
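      // Illustrative sketch of the two predefined workspaces described above. sdk is the stub from
      // the earlier sketch; the workspace_id setter name is an assumption mirroring the path parameter.
      WorkspaceResponse production = sdk.workspace(
          WorkspaceRequest.newBuilder().setWorkspaceId("production").build());
      WorkspaceResponse dev = sdk.workspace(
          WorkspaceRequest.newBuilder().setWorkspaceId("dev").build());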
+ * + * The dev workspace is NOT unique to an API session. Two applications accessing the Looker API using + * the same user account will see the same files in the dev workspace. To avoid collisions between + * API clients it's best to have each client login with API3 credentials for a different user account. + * + * Changes made to files in a dev workspace are persistent across API sessions. It's a good + * idea to commit any changes you've made to the git repository, but not strictly required. Your modified files + * reside in a special user-specific directory on the Looker server and will still be there when you login in again + * later and use update_session(workspace_id: "dev") to select the dev workspace for the new API session. + * + */ + @Override + public void workspace(WorkspaceRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/workspaces/{workspace_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + WorkspaceResponse.Builder responseBuilder = WorkspaceResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Workspace: Manage Workspaces +} \ No newline at end of file diff --git a/proto/grpc_proxy/src/main/java/com/google/looker/server/sdk/LookerStreamingServiceImpl.java b/proto/grpc_proxy/src/main/java/com/google/looker/server/sdk/LookerStreamingServiceImpl.java new file mode 100644 index 000000000..56ebf12ec --- /dev/null +++ b/proto/grpc_proxy/src/main/java/com/google/looker/server/sdk/LookerStreamingServiceImpl.java @@ -0,0 +1,14942 @@ +// MIT License +// +// Copyright (c) 2019 Looker Data Sciences, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
+ +// 401 API methods + + +package com.google.looker.server.sdk; + +import com.google.looker.grpc.services.*; +import com.google.looker.grpc.services.LookerStreamingServiceGrpc.LookerStreamingServiceImplBase; +import com.google.looker.server.rtl.LookerClient; +import com.google.looker.server.rtl.LookerClientResponse; +import com.google.protobuf.InvalidProtocolBufferException; +import com.google.protobuf.util.JsonFormat; +import io.grpc.Status; +import io.grpc.stub.StreamObserver; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class LookerStreamingServiceImpl extends LookerStreamingServiceImplBase { + + final private static Logger LOGGER = LoggerFactory.getLogger(LookerStreamingServiceImpl.class); + + final private LookerClient lookerClient; + + public LookerStreamingServiceImpl() { + lookerClient = new LookerClient("4.0"); + } + + + //#region ApiAuth: API Authentication + + /** + * ### Present client credentials to obtain an authorization token + * + * Looker API implements the OAuth2 [Resource Owner Password Credentials Grant](https://looker.com/docs/r/api/outh2_resource_owner_pc) pattern. + * The client credentials required for this login must be obtained by creating an API3 key on a user account + * in the Looker Admin console. The API3 key consists of a public `client_id` and a private `client_secret`. + * + * The access token returned by `login` must be used in the HTTP Authorization header of subsequent + * API requests, like this: + * ``` + * Authorization: token 4QDkCyCtZzYgj4C2p2cj3csJH7zqS5RzKs2kTnG4 + * ``` + * Replace "4QDkCy..." with the `access_token` value returned by `login`. + * The word `token` is a string literal and must be included exactly as shown. + * + * This function can accept `client_id` and `client_secret` parameters as URL query params or as www-form-urlencoded params in the body of the HTTP request. Since there is a small risk that URL parameters may be visible to intermediate nodes on the network route (proxies, routers, etc), passing credentials in the body of the request is considered more secure than URL params. + * + * Example of passing credentials in the HTTP request body: + * ```` + * POST HTTP /login + * Content-Type: application/x-www-form-urlencoded + * + * client_id=CGc9B7v7J48dQSJvxxx&client_secret=nNVS9cSS3xNpSC9JdsBvvvvv + * ```` + * + * ### Best Practice: + * Always pass credentials in body params. Pass credentials in URL query params **only** when you cannot pass body params due to application, tool, or other limitations. + * + * For more information and detailed examples of Looker API authorization, see [How to Authenticate to Looker API3](https://github.com/looker/looker-sdk-ruby/blob/master/authentication.md). 
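      // Illustrative sketch of the credential flow described above, issued through the proxy's login
      // RPC rather than as a raw HTTP POST. The client_id and client_secret values are the placeholder
      // strings from the documentation above; sdk is the blocking stub from the earlier sketch.
      LoginResponse token = sdk.login(
          LoginRequest.newBuilder()
              .setClientId("CGc9B7v7J48dQSJvxxx")
              .setClientSecret("nNVS9cSS3xNpSC9JdsBvvvvv")
              .build());
      // The handler forwards this as POST /login and merges the returned access token JSON into
      // LoginResponse.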
+ * + */ + @Override + public void login(LoginRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/login", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + LoginResponse.Builder responseBuilder = LoginResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create an access token that runs as a given user. + * + * This can only be called by an authenticated admin user. It allows that admin to generate a new + * authentication token for the user with the given user id. That token can then be used for subsequent + * API calls - which are then performed *as* that target user. + * + * The target user does *not* need to have a pre-existing API client_id/client_secret pair. And, no such + * credentials are created by this call. + * + * This allows for building systems where api user authentication for an arbitrary number of users is done + * outside of Looker and funneled through a single 'service account' with admin permissions. Note that a + * new access token is generated on each call. If target users are going to be making numerous API + * calls in a short period then it is wise to cache this authentication token rather than call this before + * each of those API calls. + * + * See 'login' for more detail on the access token and how to use it. + * + */ + @Override + public void loginUser(LoginUserRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/login/{user_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + LoginUserResponse.Builder responseBuilder = LoginUserResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Logout of the API and invalidate the current access token. 
+ * + */ + @Override + public void logout(LogoutRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/logout", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + LogoutResponse.Builder responseBuilder = LogoutResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion ApiAuth: API Authentication + + //#region Auth: Manage User Authentication Configuration + + /** + * ### Create SSO Embed URL + * + * Creates an SSO embed URL and cryptographically signs it with an embed secret. + * This signed URL can then be used to instantiate a Looker embed session in a PBL web application. + * Do not make any modifications to this URL - any change may invalidate the signature and + * cause the URL to fail to load a Looker embed session. + * + * A signed SSO embed URL can only be used once. After it has been used to request a page from the + * Looker server, the URL is invalid. Future requests using the same URL will fail. This is to prevent + * 'replay attacks'. + * + * The `target_url` property must be a complete URL of a Looker UI page - scheme, hostname, path and query params. + * To load a dashboard with id 56 and with a filter of `Date=1 years`, the looker URL would look like `https:/myname.looker.com/dashboards/56?Date=1%20years`. + * The best way to obtain this target_url is to navigate to the desired Looker page in your web browser, + * copy the URL shown in the browser address bar and paste it into the `target_url` property as a quoted string value in this API request. + * + * Permissions for the embed user are defined by the groups in which the embed user is a member (group_ids property) + * and the lists of models and permissions assigned to the embed user. + * At a minimum, you must provide values for either the group_ids property, or both the models and permissions properties. + * These properties are additive; an embed user can be a member of certain groups AND be granted access to models and permissions. + * + * The embed user's access is the union of permissions granted by the group_ids, models, and permissions properties. + * + * This function does not strictly require all group_ids, user attribute names, or model names to exist at the moment the + * SSO embed url is created. Unknown group_id, user attribute names or model names will be passed through to the output URL. + * To diagnose potential problems with an SSO embed URL, you can copy the signed URL into the Embed URI Validator text box in `/admin/embed`. + * + * The `secret_id` parameter is optional. If specified, its value must be the id of an active secret defined in the Looker instance. + * if not specified, the URL will be signed using the newest active secret defined in the Looker instance. 
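 *
 * As a rough sketch, a minimal request built against the generated message might look like the
 * following; the properties (`target_url`, `group_ids`, `models`, `permissions`) follow the
 * description above, but the generated setters and value types shown here are assumptions:
 * ```java
 * CreateSsoEmbedUrlRequest request = CreateSsoEmbedUrlRequest.newBuilder()
 *     .setTargetUrl("https://myname.looker.com/dashboards/56?Date=1%20years")
 *     .addGroupIds(123)               // placeholder group id granting embed permissions
 *     .addModels("thelook")           // placeholder model name
 *     .addPermissions("access_data")  // placeholder permission names
 *     .addPermissions("see_looks")
 *     .build();
 * ```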
+ * + * #### Security Note + * Protect this signed URL as you would an access token or password credentials - do not write + * it to disk, do not pass it to a third party, and only pass it through a secure HTTPS + * encrypted transport. + * + */ + @Override + public void createSsoEmbedUrl(CreateSsoEmbedUrlRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/embed/sso_url", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateSsoEmbedUrlResponse.Builder responseBuilder = CreateSsoEmbedUrlResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create an Embed URL + * + * Creates an embed URL that runs as the Looker user making this API call. ("Embed as me") + * This embed URL can then be used to instantiate a Looker embed session in a + * "Powered by Looker" (PBL) web application. + * + * This is similar to Private Embedding (https://docs.looker.com/r/admin/embed/private-embed). Instead of + * of logging into the Web UI to authenticate, the user has already authenticated against the API to be able to + * make this call. However, unlike Private Embed where the user has access to any other part of the Looker UI, + * the embed web session created by requesting the EmbedUrlResponse.url in a browser only has access to + * content visible under the `/embed` context. + * + * An embed URL can only be used once, and must be used within 5 minutes of being created. After it + * has been used to request a page from the Looker server, the URL is invalid. Future requests using + * the same URL will fail. This is to prevent 'replay attacks'. + * + * The `target_url` property must be a complete URL of a Looker Embedded UI page - scheme, hostname, path starting with "/embed" and query params. + * To load a dashboard with id 56 and with a filter of `Date=1 years`, the looker Embed URL would look like `https://myname.looker.com/embed/dashboards/56?Date=1%20years`. + * The best way to obtain this target_url is to navigate to the desired Looker page in your web browser, + * copy the URL shown in the browser address bar, insert "/embed" after the host/port, and paste it into the `target_url` property as a quoted string value in this API request. + * + * #### Security Note + * Protect this embed URL as you would an access token or password credentials - do not write + * it to disk, do not pass it to a third party, and only pass it through a secure HTTPS + * encrypted transport. 
+ * + */ + @Override + public void createEmbedUrlAsMe(CreateEmbedUrlAsMeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/embed/token_url/me", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateEmbedUrlAsMeResponse.Builder responseBuilder = CreateEmbedUrlAsMeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the LDAP configuration. + * + * Looker can be optionally configured to authenticate users against an Active Directory or other LDAP directory server. + * LDAP setup requires coordination with an administrator of that directory server. + * + * Only Looker administrators can read and update the LDAP configuration. + * + * Configuring LDAP impacts authentication for all users. This configuration should be done carefully. + * + * Looker maintains a single LDAP configuration. It can be read and updated. Updates only succeed if the new state will be valid (in the sense that all required fields are populated); it is up to you to ensure that the configuration is appropriate and correct). + * + * LDAP is enabled or disabled for Looker using the **enabled** field. + * + * Looker will never return an **auth_password** field. That value can be set, but never retrieved. + * + * See the [Looker LDAP docs](https://www.looker.com/docs/r/api/ldap_setup) for additional information. + * + */ + @Override + public void ldapConfig(LdapConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/ldap_config", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + LdapConfigResponse.Builder responseBuilder = LdapConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the LDAP configuration. + * + * Configuring LDAP impacts authentication for all users. This configuration should be done carefully. + * + * Only Looker administrators can read and update the LDAP configuration. + * + * LDAP is enabled or disabled for Looker using the **enabled** field. + * + * It is **highly** recommended that any LDAP setting changes be tested using the APIs below before being set globally. + * + * See the [Looker LDAP docs](https://www.looker.com/docs/r/api/ldap_setup) for additional information. 
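 *
 * For example, a caller might verify the new connection settings with `testLdapConfigConnection`
 * before patching the live configuration (sketch only; the camelCase setters are assumed
 * mappings of the `connection_host`, `connection_port`, and `connection_tls` fields):
 * ```java
 * TestLdapConfigConnectionRequest probe = TestLdapConfigConnectionRequest.newBuilder()
 *     .setConnectionHost("ldap.example.com")
 *     .setConnectionPort("636")
 *     .setConnectionTls(true)
 *     .build();
 * // inspect the test result before applying the change
 * stub.testLdapConfigConnection(probe, testObserver);
 * // then apply the new settings via PATCH /ldap_config
 * stub.updateLdapConfig(updateRequest, updateObserver);
 * ```
 * where `stub`, `testObserver`, `updateRequest`, and `updateObserver` are placeholders for the
 * generated async stub, the caller's response observers, and the desired configuration.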
+ * + */ + @Override + public void updateLdapConfig(UpdateLdapConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/ldap_config", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateLdapConfigResponse.Builder responseBuilder = UpdateLdapConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Test the connection settings for an LDAP configuration. + * + * This tests that the connection is possible given a connection_host and connection_port. + * + * **connection_host** and **connection_port** are required. **connection_tls** is optional. + * + * Example: + * ```json + * { + * "connection_host": "ldap.example.com", + * "connection_port": "636", + * "connection_tls": true + * } + * ``` + * + * No authentication to the LDAP server is attempted. + * + * The active LDAP settings are not modified. + * + */ + @Override + public void testLdapConfigConnection(TestLdapConfigConnectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/ldap_config/test_connection", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + TestLdapConfigConnectionResponse.Builder responseBuilder = TestLdapConfigConnectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Test the connection authentication settings for an LDAP configuration. + * + * This tests that the connection is possible and that a 'server' account to be used by Looker can authenticate to the LDAP server given connection and authentication information. + * + * **connection_host**, **connection_port**, and **auth_username**, are required. **connection_tls** and **auth_password** are optional. + * + * Example: + * ```json + * { + * "connection_host": "ldap.example.com", + * "connection_port": "636", + * "connection_tls": true, + * "auth_username": "cn=looker,dc=example,dc=com", + * "auth_password": "secret" + * } + * ``` + * + * Looker will never return an **auth_password**. If this request omits the **auth_password** field, then the **auth_password** value from the active config (if present) will be used for the test. + * + * The active LDAP settings are not modified. 
+ * + * + */ + @Override + public void testLdapConfigAuth(TestLdapConfigAuthRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/ldap_config/test_auth", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + TestLdapConfigAuthResponse.Builder responseBuilder = TestLdapConfigAuthResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Test the user authentication settings for an LDAP configuration without authenticating the user. + * + * This test will let you easily test the mapping for user properties and roles for any user without needing to authenticate as that user. + * + * This test accepts a full LDAP configuration along with a username and attempts to find the full info for the user from the LDAP server without actually authenticating the user. So, user password is not required.The configuration is validated before attempting to contact the server. + * + * **test_ldap_user** is required. + * + * The active LDAP settings are not modified. + * + * + */ + @Override + public void testLdapConfigUserInfo(TestLdapConfigUserInfoRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/ldap_config/test_user_info", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + TestLdapConfigUserInfoResponse.Builder responseBuilder = TestLdapConfigUserInfoResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Test the user authentication settings for an LDAP configuration. + * + * This test accepts a full LDAP configuration along with a username/password pair and attempts to authenticate the user with the LDAP server. The configuration is validated before attempting the authentication. + * + * Looker will never return an **auth_password**. If this request omits the **auth_password** field, then the **auth_password** value from the active config (if present) will be used for the test. + * + * **test_ldap_user** and **test_ldap_password** are required. + * + * The active LDAP settings are not modified. 
+ * + * + */ + @Override + public void testLdapConfigUserAuth(TestLdapConfigUserAuthRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/ldap_config/test_user_auth", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + TestLdapConfigUserAuthResponse.Builder responseBuilder = TestLdapConfigUserAuthResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### List All OAuth Client Apps + * + * Lists all applications registered to use OAuth2 login with this Looker instance, including + * enabled and disabled apps. + * + * Results are filtered to include only the apps that the caller (current user) + * has permission to see. + * + */ + @Override + public void allOauthClientApps(AllOauthClientAppsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/oauth_client_apps", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllOauthClientAppsResponse.Builder responseBuilder = AllOauthClientAppsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllOauthClientAppsStreamResponse.Builder responseBuilder2 = AllOauthClientAppsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Oauth Client App + * + * Returns the registered app client with matching client_guid. 
+ * + */ + @Override + public void oauthClientApp(OauthClientAppRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/oauth_client_apps/{client_guid}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + OauthClientAppResponse.Builder responseBuilder = OauthClientAppResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Register an OAuth2 Client App + * + * Registers details identifying an external web app or native app as an OAuth2 login client of the Looker instance. + * The app registration must provide a unique client_guid and redirect_uri that the app will present + * in OAuth login requests. If the client_guid and redirect_uri parameters in the login request do not match + * the app details registered with the Looker instance, the request is assumed to be a forgery and is rejected. + * + */ + @Override + public void registerOauthClientApp(RegisterOauthClientAppRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/oauth_client_apps/{client_guid}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RegisterOauthClientAppResponse.Builder responseBuilder = RegisterOauthClientAppResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update OAuth2 Client App Details + * + * Modifies the details a previously registered OAuth2 login client app. 
+ * + */ + @Override + public void updateOauthClientApp(UpdateOauthClientAppRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/oauth_client_apps/{client_guid}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateOauthClientAppResponse.Builder responseBuilder = UpdateOauthClientAppResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete OAuth Client App + * + * Deletes the registration info of the app with the matching client_guid. + * All active sessions and tokens issued for this app will immediately become invalid. + * + * ### Note: this deletion cannot be undone. + * + */ + @Override + public void deleteOauthClientApp(DeleteOauthClientAppRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/oauth_client_apps/{client_guid}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteOauthClientAppResponse.Builder responseBuilder = DeleteOauthClientAppResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Invalidate All Issued Tokens + * + * Immediately invalidates all auth codes, sessions, access tokens and refresh tokens issued for + * this app for ALL USERS of this app. 
+ * + */ + @Override + public void invalidateTokens(InvalidateTokensRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/oauth_client_apps/{client_guid}/tokens", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + InvalidateTokensResponse.Builder responseBuilder = InvalidateTokensResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Activate an app for a user + * + * Activates a user for a given oauth client app. This indicates the user has been informed that + * the app will have access to the user's looker data, and that the user has accepted and allowed + * the app to use their Looker account. + * + * Activating a user for an app that the user is already activated with returns a success response. + * + */ + @Override + public void activateAppUser(ActivateAppUserRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/oauth_client_apps/{client_guid}/users/{user_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ActivateAppUserResponse.Builder responseBuilder = ActivateAppUserResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Deactivate an app for a user + * + * Deactivate a user for a given oauth client app. All tokens issued to the app for + * this user will be invalid immediately. Before the user can use the app with their + * Looker account, the user will have to read and accept an account use disclosure statement for the app. + * + * Admin users can deactivate other users, but non-admin users can only deactivate themselves. + * + * As with most REST DELETE operations, this endpoint does not return an error if the indicated + * resource (app or user) does not exist or has already been deactivated. 
+ * + */ + @Override + public void deactivateAppUser(DeactivateAppUserRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/oauth_client_apps/{client_guid}/users/{user_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeactivateAppUserResponse.Builder responseBuilder = DeactivateAppUserResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the OIDC configuration. + * + * Looker can be optionally configured to authenticate users against an OpenID Connect (OIDC) + * authentication server. OIDC setup requires coordination with an administrator of that server. + * + * Only Looker administrators can read and update the OIDC configuration. + * + * Configuring OIDC impacts authentication for all users. This configuration should be done carefully. + * + * Looker maintains a single OIDC configuation. It can be read and updated. Updates only succeed if the new state will be valid (in the sense that all required fields are populated); it is up to you to ensure that the configuration is appropriate and correct). + * + * OIDC is enabled or disabled for Looker using the **enabled** field. + * + */ + @Override + public void oidcConfig(OidcConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/oidc_config", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + OidcConfigResponse.Builder responseBuilder = OidcConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the OIDC configuration. + * + * Configuring OIDC impacts authentication for all users. This configuration should be done carefully. + * + * Only Looker administrators can read and update the OIDC configuration. + * + * OIDC is enabled or disabled for Looker using the **enabled** field. + * + * It is **highly** recommended that any OIDC setting changes be tested using the APIs below before being set globally. 
+ * + */ + @Override + public void updateOidcConfig(UpdateOidcConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/oidc_config", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateOidcConfigResponse.Builder responseBuilder = UpdateOidcConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get a OIDC test configuration by test_slug. + * + */ + @Override + public void oidcTestConfig(OidcTestConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/oidc_test_configs/{test_slug}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + OidcTestConfigResponse.Builder responseBuilder = OidcTestConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a OIDC test configuration. + * + */ + @Override + public void deleteOidcTestConfig(DeleteOidcTestConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/oidc_test_configs/{test_slug}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteOidcTestConfigResponse.Builder responseBuilder = DeleteOidcTestConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a OIDC test configuration. 
+ * + */ + @Override + public void createOidcTestConfig(CreateOidcTestConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/oidc_test_configs", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateOidcTestConfigResponse.Builder responseBuilder = CreateOidcTestConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get password config. + * + */ + @Override + public void passwordConfig(PasswordConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/password_config", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + PasswordConfigResponse.Builder responseBuilder = PasswordConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update password config. + * + */ + @Override + public void updatePasswordConfig(UpdatePasswordConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/password_config", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdatePasswordConfigResponse.Builder responseBuilder = UpdatePasswordConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Force all credentials_email users to reset their login passwords upon their next login. 
+ * + */ + @Override + public void forcePasswordResetAtNextLoginForAllUsers(ForcePasswordResetAtNextLoginForAllUsersRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/password_config/force_password_reset_at_next_login_for_all_users", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ForcePasswordResetAtNextLoginForAllUsersResponse.Builder responseBuilder = ForcePasswordResetAtNextLoginForAllUsersResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the SAML configuration. + * + * Looker can be optionally configured to authenticate users against a SAML authentication server. + * SAML setup requires coordination with an administrator of that server. + * + * Only Looker administrators can read and update the SAML configuration. + * + * Configuring SAML impacts authentication for all users. This configuration should be done carefully. + * + * Looker maintains a single SAML configuation. It can be read and updated. Updates only succeed if the new state will be valid (in the sense that all required fields are populated); it is up to you to ensure that the configuration is appropriate and correct). + * + * SAML is enabled or disabled for Looker using the **enabled** field. + * + */ + @Override + public void samlConfig(SamlConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/saml_config", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SamlConfigResponse.Builder responseBuilder = SamlConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the SAML configuration. + * + * Configuring SAML impacts authentication for all users. This configuration should be done carefully. + * + * Only Looker administrators can read and update the SAML configuration. + * + * SAML is enabled or disabled for Looker using the **enabled** field. + * + * It is **highly** recommended that any SAML setting changes be tested using the APIs below before being set globally. 
+ * + */ + @Override + public void updateSamlConfig(UpdateSamlConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/saml_config", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateSamlConfigResponse.Builder responseBuilder = UpdateSamlConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get a SAML test configuration by test_slug. + * + */ + @Override + public void samlTestConfig(SamlTestConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/saml_test_configs/{test_slug}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SamlTestConfigResponse.Builder responseBuilder = SamlTestConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a SAML test configuration. + * + */ + @Override + public void deleteSamlTestConfig(DeleteSamlTestConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/saml_test_configs/{test_slug}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteSamlTestConfigResponse.Builder responseBuilder = DeleteSamlTestConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a SAML test configuration. 
+ * + */ + @Override + public void createSamlTestConfig(CreateSamlTestConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/saml_test_configs", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateSamlTestConfigResponse.Builder responseBuilder = CreateSamlTestConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Parse the given xml as a SAML IdP metadata document and return the result. + * + */ + @Override + public void parseSamlIdpMetadata(ParseSamlIdpMetadataRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/parse_saml_idp_metadata", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ParseSamlIdpMetadataResponse.Builder responseBuilder = ParseSamlIdpMetadataResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Fetch the given url and parse it as a SAML IdP metadata document and return the result. + * Note that this requires that the url be public or at least at a location where the Looker instance + * can fetch it without requiring any special authentication. + * + */ + @Override + public void fetchAndParseSamlIdpMetadata(FetchAndParseSamlIdpMetadataRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/fetch_and_parse_saml_idp_metadata", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FetchAndParseSamlIdpMetadataResponse.Builder responseBuilder = FetchAndParseSamlIdpMetadataResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get session config. 
+ * + */ + @Override + public void sessionConfig(SessionConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/session_config", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SessionConfigResponse.Builder responseBuilder = SessionConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update session config. + * + */ + @Override + public void updateSessionConfig(UpdateSessionConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/session_config", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateSessionConfigResponse.Builder responseBuilder = UpdateSessionConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get currently locked-out users. + * + */ + @Override + public void allUserLoginLockouts(AllUserLoginLockoutsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/user_login_lockouts", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllUserLoginLockoutsResponse.Builder responseBuilder = AllUserLoginLockoutsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllUserLoginLockoutsStreamResponse.Builder responseBuilder2 = AllUserLoginLockoutsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search currently locked-out users. 
+ * + */ + @Override + public void searchUserLoginLockouts(SearchUserLoginLockoutsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/user_login_lockouts/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchUserLoginLockoutsResponse.Builder responseBuilder = SearchUserLoginLockoutsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SearchUserLoginLockoutsStreamResponse.Builder responseBuilder2 = SearchUserLoginLockoutsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Removes login lockout for the associated user. + * + */ + @Override + public void deleteUserLoginLockout(DeleteUserLoginLockoutRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/user_login_lockout/{key}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserLoginLockoutResponse.Builder responseBuilder = DeleteUserLoginLockoutResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Auth: Manage User Authentication Configuration + + //#region Board: Manage Boards + + /** + * ### Get information about all boards. 
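 *
 * Because this service streams list results, each board arrives as its own message rather than
 * as a single array. A consuming sketch (given a `channel` as in the `login` example above;
 * `getResult()` is assumed to be the generated accessor paired with the `setResult()` call used
 * in the implementation below):
 * ```java
 * LookerStreamingServiceGrpc.newStub(channel).allBoards(
 *     AllBoardsRequest.newBuilder().build(),
 *     new StreamObserver<AllBoardsStreamResponse>() {
 *       @Override public void onNext(AllBoardsStreamResponse msg) {
 *         System.out.println(msg.getResult());  // one Board per message
 *       }
 *       @Override public void onError(Throwable t) { t.printStackTrace(); }
 *       @Override public void onCompleted() { }
 *     });
 * ```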
+ * + */ + @Override + public void allBoards(AllBoardsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/boards", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllBoardsResponse.Builder responseBuilder = AllBoardsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllBoardsStreamResponse.Builder responseBuilder2 = AllBoardsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a new board. + * + */ + @Override + public void createBoard(CreateBoardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/boards", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateBoardResponse.Builder responseBuilder = CreateBoardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search Boards + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. 
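 *
 * For instance, a hypothetical search for boards whose title starts with "dan" or whose
 * description is non-null (the field names on the generated request message are assumptions,
 * but the wildcard, NULL-expression, and `filter_or` semantics follow the rules above):
 * ```java
 * SearchBoardsRequest search = SearchBoardsRequest.newBuilder()
 *     .setTitle("dan%")            // LIKE-style wildcard: matches "danger" and "Danzig", not "David"
 *     .setDescription("NOT NULL")  // special expression: only rows with a non-null description
 *     .setFilterOr(true)           // combine the criteria with OR instead of AND
 *     .build();
 * stub.searchBoards(search, streamObserver);  // results stream back as SearchBoardsStreamResponse messages
 * ```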
+ * + * + */ + @Override + public void searchBoards(SearchBoardsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/boards/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchBoardsResponse.Builder responseBuilder = SearchBoardsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SearchBoardsStreamResponse.Builder responseBuilder2 = SearchBoardsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a board. + * + */ + @Override + public void board(BoardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/boards/{board_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + BoardResponse.Builder responseBuilder = BoardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a board definition. + * + */ + @Override + public void updateBoard(UpdateBoardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/boards/{board_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateBoardResponse.Builder responseBuilder = UpdateBoardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a board. 
+ * + */ + @Override + public void deleteBoard(DeleteBoardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/boards/{board_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteBoardResponse.Builder responseBuilder = DeleteBoardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all board items. + * + */ + @Override + public void allBoardItems(AllBoardItemsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/board_items", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllBoardItemsResponse.Builder responseBuilder = AllBoardItemsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllBoardItemsStreamResponse.Builder responseBuilder2 = AllBoardItemsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a new board item. + * + */ + @Override + public void createBoardItem(CreateBoardItemRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/board_items", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateBoardItemResponse.Builder responseBuilder = CreateBoardItemResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a board item. 
+ * + */ + @Override + public void boardItem(BoardItemRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/board_items/{board_item_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + BoardItemResponse.Builder responseBuilder = BoardItemResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a board item definition. + * + */ + @Override + public void updateBoardItem(UpdateBoardItemRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/board_items/{board_item_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateBoardItemResponse.Builder responseBuilder = UpdateBoardItemResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a board item. + * + */ + @Override + public void deleteBoardItem(DeleteBoardItemRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/board_items/{board_item_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteBoardItemResponse.Builder responseBuilder = DeleteBoardItemResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all board sections. 
+ * + */ + @Override + public void allBoardSections(AllBoardSectionsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/board_sections", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllBoardSectionsResponse.Builder responseBuilder = AllBoardSectionsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllBoardSectionsStreamResponse.Builder responseBuilder2 = AllBoardSectionsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a new board section. + * + */ + @Override + public void createBoardSection(CreateBoardSectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/board_sections", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateBoardSectionResponse.Builder responseBuilder = CreateBoardSectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a board section. + * + */ + @Override + public void boardSection(BoardSectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/board_sections/{board_section_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + BoardSectionResponse.Builder responseBuilder = BoardSectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a board section definition. 
+ * + */ + @Override + public void updateBoardSection(UpdateBoardSectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/board_sections/{board_section_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateBoardSectionResponse.Builder responseBuilder = UpdateBoardSectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a board section. + * + */ + @Override + public void deleteBoardSection(DeleteBoardSectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/board_sections/{board_section_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteBoardSectionResponse.Builder responseBuilder = DeleteBoardSectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Board: Manage Boards + + //#region ColorCollection: Manage Color Collections + + /** + * ### Get an array of all existing Color Collections + * Get a **single** color collection by id with [ColorCollection](#!/ColorCollection/color_collection) + * + * Get all **standard** color collections with [ColorCollection](#!/ColorCollection/color_collections_standard) + * + * Get all **custom** color collections with [ColorCollection](#!/ColorCollection/color_collections_custom) + * + * **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. 
+ * + * + */ + @Override + public void allColorCollections(AllColorCollectionsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/color_collections", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllColorCollectionsResponse.Builder responseBuilder = AllColorCollectionsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllColorCollectionsStreamResponse.Builder responseBuilder2 = AllColorCollectionsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a custom color collection with the specified information + * + * Creates a new custom color collection object, returning the details, including the created id. + * + * **Update** an existing color collection with [Update Color Collection](#!/ColorCollection/update_color_collection) + * + * **Permanently delete** an existing custom color collection with [Delete Color Collection](#!/ColorCollection/delete_color_collection) + * + * **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + * + * + */ + @Override + public void createColorCollection(CreateColorCollectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/color_collections", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateColorCollectionResponse.Builder responseBuilder = CreateColorCollectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get an array of all existing **Custom** Color Collections + * Get a **single** color collection by id with [ColorCollection](#!/ColorCollection/color_collection) + * + * Get all **standard** color collections with [ColorCollection](#!/ColorCollection/color_collections_standard) + * + * **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. 
+ * + * + */ + @Override + public void colorCollectionsCustom(ColorCollectionsCustomRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/color_collections/custom", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ColorCollectionsCustomResponse.Builder responseBuilder = ColorCollectionsCustomResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + ColorCollectionsCustomStreamResponse.Builder responseBuilder2 = ColorCollectionsCustomStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get an array of all existing **Standard** Color Collections + * Get a **single** color collection by id with [ColorCollection](#!/ColorCollection/color_collection) + * + * Get all **custom** color collections with [ColorCollection](#!/ColorCollection/color_collections_custom) + * + * **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + * + * + */ + @Override + public void colorCollectionsStandard(ColorCollectionsStandardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/color_collections/standard", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ColorCollectionsStandardResponse.Builder responseBuilder = ColorCollectionsStandardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + ColorCollectionsStandardStreamResponse.Builder responseBuilder2 = ColorCollectionsStandardStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the default color collection + * + * Use this to retrieve the default Color Collection. 
+ * + * Set the default color collection with [ColorCollection](#!/ColorCollection/set_default_color_collection) + * + */ + @Override + public void defaultColorCollection(DefaultColorCollectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/color_collections/default", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DefaultColorCollectionResponse.Builder responseBuilder = DefaultColorCollectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Set the global default Color Collection by ID + * + * Returns the new specified default Color Collection object. + * **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + * + * + */ + @Override + public void setDefaultColorCollection(SetDefaultColorCollectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/color_collections/default", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SetDefaultColorCollectionResponse.Builder responseBuilder = SetDefaultColorCollectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get a Color Collection by ID + * + * Use this to retrieve a specific Color Collection. + * Get a **single** color collection by id with [ColorCollection](#!/ColorCollection/color_collection) + * + * Get all **standard** color collections with [ColorCollection](#!/ColorCollection/color_collections_standard) + * + * Get all **custom** color collections with [ColorCollection](#!/ColorCollection/color_collections_custom) + * + * **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. 
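// Editorial note, not part of the generated diff: every handler in this file round-trips
// protobuf messages through JSON with protobuf-java-util's JsonFormat, exactly as shown in the
// surrounding methods. A minimal, self-contained illustration of that print/merge pattern,
// using the well-known Struct type so it compiles without any Looker-generated classes:
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Struct;
import com.google.protobuf.Value;
import com.google.protobuf.util.JsonFormat;

class JsonRoundTripSketch {
  public static void main(String[] args) throws InvalidProtocolBufferException {
    Struct request = Struct.newBuilder()
        .putFields("fields", Value.newBuilder().setStringValue("id,label").build())
        .build();
    // Message -> JSON; preservingProtoFieldNames keeps the proto (snake_case) field names,
    // which is what the generated handlers rely on when calling the Looker REST API.
    String inputJson = JsonFormat.printer().preservingProtoFieldNames().print(request);
    // JSON -> builder; ignoringUnknownFields tolerates fields the .proto does not define.
    Struct.Builder responseBuilder = Struct.newBuilder();
    JsonFormat.parser().ignoringUnknownFields().merge(inputJson, responseBuilder);
    System.out.println(responseBuilder.build());
  }
}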
+ * + * + */ + @Override + public void colorCollection(ColorCollectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/color_collections/{collection_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ColorCollectionResponse.Builder responseBuilder = ColorCollectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a custom color collection by id. + * **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + * + * + */ + @Override + public void updateColorCollection(UpdateColorCollectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/color_collections/{collection_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateColorCollectionResponse.Builder responseBuilder = UpdateColorCollectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a custom color collection by id + * + * This operation permanently deletes the identified **Custom** color collection. + * + * **Standard** color collections cannot be deleted + * + * Because multiple color collections can have the same label, they must be deleted by ID, not name. + * **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. 
+ * + * + */ + @Override + public void deleteColorCollection(DeleteColorCollectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/color_collections/{collection_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteColorCollectionResponse.Builder responseBuilder = DeleteColorCollectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion ColorCollection: Manage Color Collections + + //#region Command: Manage Commands + + /** + * ### Get All Commands. + * + */ + @Override + public void getAllCommands(GetAllCommandsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/commands", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + GetAllCommandsResponse.Builder responseBuilder = GetAllCommandsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + GetAllCommandsStreamResponse.Builder responseBuilder2 = GetAllCommandsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a new command. 
+ * # Required fields: [:name, :linked_content_id, :linked_content_type] + * # `linked_content_type` must be one of ["dashboard", "lookml_dashboard"] + * # + * + */ + @Override + public void createCommand(CreateCommandRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/commands", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateCommandResponse.Builder responseBuilder = CreateCommandResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update an existing custom command. + * # Optional fields: ['name', 'description'] + * # + * + */ + @Override + public void updateCommand(UpdateCommandRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/commands/{command_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateCommandResponse.Builder responseBuilder = UpdateCommandResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete an existing custom command. + * + */ + @Override + public void deleteCommand(DeleteCommandRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/commands/{command_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteCommandResponse.Builder responseBuilder = DeleteCommandResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Command: Manage Commands + + //#region Config: Manage General Configuration + + /** + * Get the current Cloud Storage Configuration. 
+ * + */ + @Override + public void cloudStorageConfiguration(CloudStorageConfigurationRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/cloud_storage", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CloudStorageConfigurationResponse.Builder responseBuilder = CloudStorageConfigurationResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Update the current Cloud Storage Configuration. + * + */ + @Override + public void updateCloudStorageConfiguration(UpdateCloudStorageConfigurationRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/cloud_storage", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateCloudStorageConfigurationResponse.Builder responseBuilder = UpdateCloudStorageConfigurationResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the current status and content of custom welcome emails + * + */ + @Override + public void customWelcomeEmail(CustomWelcomeEmailRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/custom_welcome_email", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CustomWelcomeEmailResponse.Builder responseBuilder = CustomWelcomeEmailResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Update custom welcome email setting and values. Optionally send a test email with the new content to the currently logged in user. 
+ * + */ + @Override + public void updateCustomWelcomeEmail(UpdateCustomWelcomeEmailRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/custom_welcome_email", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateCustomWelcomeEmailResponse.Builder responseBuilder = UpdateCustomWelcomeEmailResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Requests to this endpoint will send a welcome email with the custom content provided in the body to the currently logged in user. + * + */ + @Override + public void updateCustomWelcomeEmailTest(UpdateCustomWelcomeEmailTestRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/custom_welcome_email_test", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateCustomWelcomeEmailTestResponse.Builder responseBuilder = UpdateCustomWelcomeEmailTestResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Retrieve the value for whether or not digest emails is enabled + * + */ + @Override + public void digestEmailsEnabled(DigestEmailsEnabledRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/digest_emails_enabled", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DigestEmailsEnabledResponse.Builder responseBuilder = DigestEmailsEnabledResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the setting for enabling/disabling digest emails + * + */ + @Override + public void updateDigestEmailsEnabled(UpdateDigestEmailsEnabledRequest request, StreamObserver 
responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/digest_emails_enabled", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateDigestEmailsEnabledResponse.Builder responseBuilder = UpdateDigestEmailsEnabledResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Trigger the generation of digest email records and send them to Looker's internal system. This does not send + * any actual emails, it generates records containing content which may be of interest for users who have become inactive. + * Emails will be sent at a later time from Looker's internal system if the Digest Emails feature is enabled in settings. + */ + @Override + public void createDigestEmailSend(CreateDigestEmailSendRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/digest_email_send", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateDigestEmailSendResponse.Builder responseBuilder = CreateDigestEmailSendResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Set the menu item name and content for internal help resources + * + */ + @Override + public void internalHelpResourcesContent(InternalHelpResourcesContentRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/internal_help_resources_content", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + InternalHelpResourcesContentResponse.Builder responseBuilder = InternalHelpResourcesContentResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Update internal help resources content + * + */ + @Override + public 
void updateInternalHelpResourcesContent(UpdateInternalHelpResourcesContentRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/internal_help_resources_content", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateInternalHelpResourcesContentResponse.Builder responseBuilder = UpdateInternalHelpResourcesContentResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get and set the options for internal help resources + * + */ + @Override + public void internalHelpResources(InternalHelpResourcesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/internal_help_resources_enabled", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + InternalHelpResourcesResponse.Builder responseBuilder = InternalHelpResourcesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Update internal help resources settings + * + */ + @Override + public void updateInternalHelpResources(UpdateInternalHelpResourcesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/internal_help_resources", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateInternalHelpResourcesResponse.Builder responseBuilder = UpdateInternalHelpResourcesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get all legacy features. 
+ * + */ + @Override + public void allLegacyFeatures(AllLegacyFeaturesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/legacy_features", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllLegacyFeaturesResponse.Builder responseBuilder = AllLegacyFeaturesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllLegacyFeaturesStreamResponse.Builder responseBuilder2 = AllLegacyFeaturesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the legacy feature with a specific id. + * + */ + @Override + public void legacyFeature(LegacyFeatureRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/legacy_features/{legacy_feature_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + LegacyFeatureResponse.Builder responseBuilder = LegacyFeatureResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update information about the legacy feature with a specific id. + * + */ + @Override + public void updateLegacyFeature(UpdateLegacyFeatureRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/legacy_features/{legacy_feature_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateLegacyFeatureResponse.Builder responseBuilder = UpdateLegacyFeatureResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get a list of locales that Looker supports. 
+ * + */ + @Override + public void allLocales(AllLocalesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/locales", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllLocalesResponse.Builder responseBuilder = AllLocalesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllLocalesStreamResponse.Builder responseBuilder2 = AllLocalesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get a list of timezones that Looker supports (e.g. useful for scheduling tasks). + * + */ + @Override + public void allTimezones(AllTimezonesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/timezones", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllTimezonesResponse.Builder responseBuilder = AllTimezonesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllTimezonesStreamResponse.Builder responseBuilder2 = AllTimezonesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all API versions supported by this Looker instance. + * + */ + @Override + public void versions(VersionsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/versions", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + VersionsResponse.Builder responseBuilder = VersionsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### This feature is enabled only by special license. 
+ * ### Gets the whitelabel configuration, which includes hiding documentation links, custom favicon uploading, etc. + * + */ + @Override + public void whitelabelConfiguration(WhitelabelConfigurationRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/whitelabel_configuration", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + WhitelabelConfigurationResponse.Builder responseBuilder = WhitelabelConfigurationResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the whitelabel configuration + * + */ + @Override + public void updateWhitelabelConfiguration(UpdateWhitelabelConfigurationRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/whitelabel_configuration", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateWhitelabelConfigurationResponse.Builder responseBuilder = UpdateWhitelabelConfigurationResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Config: Manage General Configuration + + //#region Connection: Manage Database Connections + + /** + * ### Get information about all connections. 
+ * + */ + @Override + public void allConnections(AllConnectionsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/connections", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllConnectionsResponse.Builder responseBuilder = AllConnectionsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllConnectionsStreamResponse.Builder responseBuilder2 = AllConnectionsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a connection using the specified configuration. + * + */ + @Override + public void createConnection(CreateConnectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/connections", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateConnectionResponse.Builder responseBuilder = CreateConnectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a connection. + * + */ + @Override + public void connection(ConnectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/connections/{connection_name}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ConnectionResponse.Builder responseBuilder = ConnectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a connection using the specified configuration. 
+ * + */ + @Override + public void updateConnection(UpdateConnectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/connections/{connection_name}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateConnectionResponse.Builder responseBuilder = UpdateConnectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a connection. + * + */ + @Override + public void deleteConnection(DeleteConnectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/connections/{connection_name}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteConnectionResponse.Builder responseBuilder = DeleteConnectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a connection override. + * + */ + @Override + public void deleteConnectionOverride(DeleteConnectionOverrideRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/connections/{connection_name}/connection_override/{override_context}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteConnectionOverrideResponse.Builder responseBuilder = DeleteConnectionOverrideResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Test an existing connection. + * + * Note that a connection's 'dialect' property has a 'connection_tests' property that lists the + * specific types of tests that the connection supports. + * + * This API is rate limited. + * + * Unsupported tests in the request will be ignored. 
+ * + */ + @Override + public void testConnection(TestConnectionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/connections/{connection_name}/test", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + TestConnectionResponse.Builder responseBuilder = TestConnectionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + TestConnectionStreamResponse.Builder responseBuilder2 = TestConnectionStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Test a connection configuration. + * + * Note that a connection's 'dialect' property has a 'connection_tests' property that lists the + * specific types of tests that the connection supports. + * + * This API is rate limited. + * + * Unsupported tests in the request will be ignored. + * + */ + @Override + public void testConnectionConfig(TestConnectionConfigRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/connections/test", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + TestConnectionConfigResponse.Builder responseBuilder = TestConnectionConfigResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + TestConnectionConfigStreamResponse.Builder responseBuilder2 = TestConnectionConfigStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all dialects. 
+ * + */ + @Override + public void allDialectInfos(AllDialectInfosRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dialect_info", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllDialectInfosResponse.Builder responseBuilder = AllDialectInfosResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllDialectInfosStreamResponse.Builder responseBuilder2 = AllDialectInfosStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get all External OAuth Applications. + * + */ + @Override + public void allExternalOauthApplications(AllExternalOauthApplicationsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/external_oauth_applications", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllExternalOauthApplicationsResponse.Builder responseBuilder = AllExternalOauthApplicationsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllExternalOauthApplicationsStreamResponse.Builder responseBuilder2 = AllExternalOauthApplicationsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create an OAuth Application using the specified configuration. 
+ * + */ + @Override + public void createExternalOauthApplication(CreateExternalOauthApplicationRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/external_oauth_applications", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateExternalOauthApplicationResponse.Builder responseBuilder = CreateExternalOauthApplicationResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all SSH Servers. + * + */ + @Override + public void allSshServers(AllSshServersRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/ssh_servers", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllSshServersResponse.Builder responseBuilder = AllSshServersResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllSshServersStreamResponse.Builder responseBuilder2 = AllSshServersStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create an SSH Server. + * + */ + @Override + public void createSshServer(CreateSshServerRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/ssh_servers", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateSshServerResponse.Builder responseBuilder = CreateSshServerResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about an SSH Server. 
+ * + */ + @Override + public void sshServer(SshServerRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/ssh_server/{ssh_server_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SshServerResponse.Builder responseBuilder = SshServerResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update an SSH Server. + * + */ + @Override + public void updateSshServer(UpdateSshServerRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/ssh_server/{ssh_server_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateSshServerResponse.Builder responseBuilder = UpdateSshServerResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete an SSH Server. 
+ * + */ + @Override + public void deleteSshServer(DeleteSshServerRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/ssh_server/{ssh_server_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteSshServerResponse.Builder responseBuilder = DeleteSshServerResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Test the SSH Server + * + */ + @Override + public void testSshServer(TestSshServerRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/ssh_server/{ssh_server_id}/test", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + TestSshServerResponse.Builder responseBuilder = TestSshServerResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all SSH Tunnels. 
+ * + */ + @Override + public void allSshTunnels(AllSshTunnelsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/ssh_tunnels", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllSshTunnelsResponse.Builder responseBuilder = AllSshTunnelsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllSshTunnelsStreamResponse.Builder responseBuilder2 = AllSshTunnelsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create an SSH Tunnel + * + */ + @Override + public void createSshTunnel(CreateSshTunnelRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/ssh_tunnels", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateSshTunnelResponse.Builder responseBuilder = CreateSshTunnelResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about an SSH Tunnel. 
+ * + */ + @Override + public void sshTunnel(SshTunnelRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/ssh_tunnel/{ssh_tunnel_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SshTunnelResponse.Builder responseBuilder = SshTunnelResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update an SSH Tunnel + * + */ + @Override + public void updateSshTunnel(UpdateSshTunnelRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/ssh_tunnel/{ssh_tunnel_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateSshTunnelResponse.Builder responseBuilder = UpdateSshTunnelResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete an SSH Tunnel + * + */ + @Override + public void deleteSshTunnel(DeleteSshTunnelRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/ssh_tunnel/{ssh_tunnel_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteSshTunnelResponse.Builder responseBuilder = DeleteSshTunnelResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Test the SSH Tunnel + * + */ + @Override + public void testSshTunnel(TestSshTunnelRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/ssh_tunnel/{ssh_tunnel_id}/test", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + 
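// If LookerClientResponse carries a non-null Status, no TestSshTunnelResponse is emitted; the status is surfaced to the gRPC caller as a StatusRuntimeException. +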
responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + TestSshTunnelResponse.Builder responseBuilder = TestSshTunnelResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the SSH public key + * + * Get the public key created for this instance to identify itself to a remote SSH server. + * + */ + @Override + public void sshPublicKey(SshPublicKeyRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/ssh_public_key", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SshPublicKeyResponse.Builder responseBuilder = SshPublicKeyResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Connection: Manage Database Connections + + //#region Content: Manage Content + + /** + * ### Search Favorite Content + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. 
+ * + * + */ + @Override + public void searchContentFavorites(SearchContentFavoritesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/content_favorite/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchContentFavoritesResponse.Builder responseBuilder = SearchContentFavoritesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SearchContentFavoritesStreamResponse.Builder responseBuilder2 = SearchContentFavoritesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get favorite content by its id + */ + @Override + public void contentFavorite(ContentFavoriteRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/content_favorite/{content_favorite_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ContentFavoriteResponse.Builder responseBuilder = ContentFavoriteResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete favorite content + */ + @Override + public void deleteContentFavorite(DeleteContentFavoriteRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/content_favorite/{content_favorite_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteContentFavoriteResponse.Builder responseBuilder = DeleteContentFavoriteResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create favorite content + */ + @Override + public void createContentFavorite(CreateContentFavoriteRequest request, StreamObserver 
responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/content_favorite", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateContentFavoriteResponse.Builder responseBuilder = CreateContentFavoriteResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all content metadata in a space. + * + */ + @Override + public void allContentMetadatas(AllContentMetadatasRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/content_metadata", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllContentMetadatasResponse.Builder responseBuilder = AllContentMetadatasResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllContentMetadatasStreamResponse.Builder responseBuilder2 = AllContentMetadatasStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about an individual content metadata record. + * + */ + @Override + public void contentMetadata(ContentMetadataRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/content_metadata/{content_metadata_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ContentMetadataResponse.Builder responseBuilder = ContentMetadataResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Move a piece of content. 
+ * + */ + @Override + public void updateContentMetadata(UpdateContentMetadataRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/content_metadata/{content_metadata_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateContentMetadataResponse.Builder responseBuilder = UpdateContentMetadataResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### All content metadata access records for a content metadata item. + * + */ + @Override + public void allContentMetadataAccesses(AllContentMetadataAccessesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/content_metadata_access", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllContentMetadataAccessesResponse.Builder responseBuilder = AllContentMetadataAccessesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllContentMetadataAccessesStreamResponse.Builder responseBuilder2 = AllContentMetadataAccessesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create content metadata access. 
+ * + */ + @Override + public void createContentMetadataAccess(CreateContentMetadataAccessRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/content_metadata_access", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateContentMetadataAccessResponse.Builder responseBuilder = CreateContentMetadataAccessResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update type of access for content metadata. + * + */ + @Override + public void updateContentMetadataAccess(UpdateContentMetadataAccessRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/content_metadata_access/{content_metadata_access_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateContentMetadataAccessResponse.Builder responseBuilder = UpdateContentMetadataAccessResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Remove content metadata access. + * + */ + @Override + public void deleteContentMetadataAccess(DeleteContentMetadataAccessRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/content_metadata_access/{content_metadata_access_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteContentMetadataAccessResponse.Builder responseBuilder = DeleteContentMetadataAccessResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get an image representing the contents of a dashboard or look. 
+ * + * The returned thumbnail is an abstract representation of the contents of a dashbord or look and does not + * reflect the actual data displayed in the respective visualizations. + * + */ + @Override + public void contentThumbnail(ContentThumbnailRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/content_thumbnail/{type}/{resource_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ContentThumbnailResponse.Builder responseBuilder = ContentThumbnailResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Validate All Content + * + * Performs validation of all looks and dashboards + * Returns a list of errors found as well as metadata about the content validation run. + * + */ + @Override + public void contentValidation(ContentValidationRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/content_validation", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ContentValidationResponse.Builder responseBuilder = ContentValidationResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search Content Views + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. 
+ * + * + */ + @Override + public void searchContentViews(SearchContentViewsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/content_view/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchContentViewsResponse.Builder responseBuilder = SearchContentViewsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SearchContentViewsStreamResponse.Builder responseBuilder2 = SearchContentViewsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get a vector image representing the contents of a dashboard or look. + * + * # DEPRECATED: Use [content_thumbnail()](#!/Content/content_thumbnail) + * + * The returned thumbnail is an abstract representation of the contents of a dashbord or look and does not + * reflect the actual data displayed in the respective visualizations. + * + */ + @Override + public void vectorThumbnail(VectorThumbnailRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/vector_thumbnail/{type}/{resource_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + VectorThumbnailResponse.Builder responseBuilder = VectorThumbnailResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Content: Manage Content + + //#region Dashboard: Manage Dashboards + + /** + * ### Get information about all active dashboards. + * + * Returns an array of **abbreviated dashboard objects**. Dashboards marked as deleted are excluded from this list. 
+ * + * Get the **full details** of a specific dashboard by id with [dashboard()](#!/Dashboard/dashboard) + * + * Find **deleted dashboards** with [search_dashboards()](#!/Dashboard/search_dashboards) + * + */ + @Override + public void allDashboards(AllDashboardsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboards", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllDashboardsResponse.Builder responseBuilder = AllDashboardsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllDashboardsStreamResponse.Builder responseBuilder2 = AllDashboardsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a new dashboard + * + * Creates a new dashboard object and returns the details of the newly created dashboard. + * + * `Title`, `user_id`, and `space_id` are all required fields. + * `Space_id` and `user_id` must contain the id of an existing space or user, respectively. + * A dashboard's `title` must be unique within the space in which it resides. + * + * If you receive a 422 error response when creating a dashboard, be sure to look at the + * response body for information about exactly which fields are missing or contain invalid data. + * + * You can **update** an existing dashboard with [update_dashboard()](#!/Dashboard/update_dashboard) + * + * You can **permanently delete** an existing dashboard with [delete_dashboard()](#!/Dashboard/delete_dashboard) + * + */ + @Override + public void createDashboard(CreateDashboardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/dashboards", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateDashboardResponse.Builder responseBuilder = CreateDashboardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search Dashboards + * + * Returns an **array of dashboard objects** that match the specified search criteria. + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. 
+ * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. + * + * + * The parameters `limit`, and `offset` are recommended for fetching results in page-size chunks. + * + * Get a **single dashboard** by id with [dashboard()](#!/Dashboard/dashboard) + * + */ + @Override + public void searchDashboards(SearchDashboardsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboards/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchDashboardsResponse.Builder responseBuilder = SearchDashboardsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SearchDashboardsStreamResponse.Builder responseBuilder2 = SearchDashboardsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Import a LookML dashboard to a space as a UDD + * Creates a UDD (a dashboard which exists in the Looker database rather than as a LookML file) from the LookML dashboard + * and puts it in the space specified. The created UDD will have a lookml_link_id which links to the original LookML dashboard. + * + * To give the imported dashboard specify a (e.g. title: "my title") in the body of your request, otherwise the imported + * dashboard will have the same title as the original LookML dashboard. + * + * For this operation to succeed the user must have permission to see the LookML dashboard in question, and have permission to + * create content in the space the dashboard is being imported to. 
+ * + * **Sync** a linked UDD with [sync_lookml_dashboard()](#!/Dashboard/sync_lookml_dashboard) + * **Unlink** a linked UDD by setting lookml_link_id to null with [update_dashboard()](#!/Dashboard/update_dashboard) + * + */ + @Override + public void importLookmlDashboard(ImportLookmlDashboardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/dashboards/{lookml_dashboard_id}/import/{space_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ImportLookmlDashboardResponse.Builder responseBuilder = ImportLookmlDashboardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update all linked dashboards to match the specified LookML dashboard. + * + * Any UDD (a dashboard which exists in the Looker database rather than as a LookML file) which has a `lookml_link_id` + * property value referring to a LookML dashboard's id (model::dashboardname) will be updated so that it matches the current state of the LookML dashboard. + * + * For this operation to succeed the user must have permission to view the LookML dashboard, and only linked dashboards + * that the user has permission to update will be synced. 
+ * + * To **link** or **unlink** a UDD set the `lookml_link_id` property with [update_dashboard()](#!/Dashboard/update_dashboard) + * + */ + @Override + public void syncLookmlDashboard(SyncLookmlDashboardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/dashboards/{lookml_dashboard_id}/sync", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SyncLookmlDashboardResponse.Builder responseBuilder = SyncLookmlDashboardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SyncLookmlDashboardStreamResponse.Builder responseBuilder2 = SyncLookmlDashboardStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a dashboard + * + * Returns the full details of the identified dashboard object + * + * Get a **summary list** of all active dashboards with [all_dashboards()](#!/Dashboard/all_dashboards) + * + * You can **Search** for dashboards with [search_dashboards()](#!/Dashboard/search_dashboards) + * + */ + @Override + public void dashboard(DashboardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboards/{dashboard_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardResponse.Builder responseBuilder = DashboardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a dashboard + * + * You can use this function to change the string and integer properties of + * a dashboard. Nested objects such as filters, dashboard elements, or dashboard layout components + * cannot be modified by this function - use the update functions for the respective + * nested object types (like [update_dashboard_filter()](#!/3.1/Dashboard/update_dashboard_filter) to change a filter) + * to modify nested objects referenced by a dashboard. + * + * If you receive a 422 error response when updating a dashboard, be sure to look at the + * response body for information about exactly which fields are missing or contain invalid data. 
+ * + */ + @Override + public void updateDashboard(UpdateDashboardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/dashboards/{dashboard_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateDashboardResponse.Builder responseBuilder = UpdateDashboardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete the dashboard with the specified id + * + * Permanently **deletes** a dashboard. (The dashboard cannot be recovered after this operation.) + * + * "Soft" delete or hide a dashboard by setting its `deleted` status to `True` with [update_dashboard()](#!/Dashboard/update_dashboard). + * + * Note: When a dashboard is deleted in the UI, it is soft deleted. Use this API call to permanently remove it, if desired. + * + */ + @Override + public void deleteDashboard(DeleteDashboardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/dashboards/{dashboard_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteDashboardResponse.Builder responseBuilder = DeleteDashboardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Aggregate Table LookML for Each Query on a Dahboard + * + * Returns a JSON object that contains the dashboard id and Aggregate Table lookml + * + * + */ + @Override + public void dashboardAggregateTableLookml(DashboardAggregateTableLookmlRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboards/aggregate_table_lookml/{dashboard_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardAggregateTableLookmlResponse.Builder responseBuilder = DashboardAggregateTableLookmlResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch 
(InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get lookml of a UDD + * + * Returns a JSON object that contains the dashboard id and the full lookml + * + * + */ + @Override + public void dashboardLookml(DashboardLookmlRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboards/lookml/{dashboard_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardLookmlResponse.Builder responseBuilder = DashboardLookmlResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search Dashboard Elements + * + * Returns an **array of DashboardElement objects** that match the specified search criteria. + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. 
+ * + * + */ + @Override + public void searchDashboardElements(SearchDashboardElementsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboard_elements/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchDashboardElementsResponse.Builder responseBuilder = SearchDashboardElementsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SearchDashboardElementsStreamResponse.Builder responseBuilder2 = SearchDashboardElementsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the dashboard element with a specific id. + */ + @Override + public void dashboardElement(DashboardElementRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboard_elements/{dashboard_element_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardElementResponse.Builder responseBuilder = DashboardElementResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the dashboard element with a specific id. + */ + @Override + public void updateDashboardElement(UpdateDashboardElementRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/dashboard_elements/{dashboard_element_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateDashboardElementResponse.Builder responseBuilder = UpdateDashboardElementResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a dashboard element with a specific id. 
+ */ + @Override + public void deleteDashboardElement(DeleteDashboardElementRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/dashboard_elements/{dashboard_element_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteDashboardElementResponse.Builder responseBuilder = DeleteDashboardElementResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all the dashboard elements on a dashboard with a specific id. + */ + @Override + public void dashboardDashboardElements(DashboardDashboardElementsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboards/{dashboard_id}/dashboard_elements", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardDashboardElementsResponse.Builder responseBuilder = DashboardDashboardElementsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + DashboardDashboardElementsStreamResponse.Builder responseBuilder2 = DashboardDashboardElementsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a dashboard element on the dashboard with a specific id. 
+ */ + @Override + public void createDashboardElement(CreateDashboardElementRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/dashboard_elements", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateDashboardElementResponse.Builder responseBuilder = CreateDashboardElementResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the dashboard filters with a specific id. + */ + @Override + public void dashboardFilter(DashboardFilterRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboard_filters/{dashboard_filter_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardFilterResponse.Builder responseBuilder = DashboardFilterResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the dashboard filter with a specific id. + */ + @Override + public void updateDashboardFilter(UpdateDashboardFilterRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/dashboard_filters/{dashboard_filter_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateDashboardFilterResponse.Builder responseBuilder = UpdateDashboardFilterResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a dashboard filter with a specific id. 
+ */ + @Override + public void deleteDashboardFilter(DeleteDashboardFilterRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/dashboard_filters/{dashboard_filter_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteDashboardFilterResponse.Builder responseBuilder = DeleteDashboardFilterResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all the dashboard filters on a dashboard with a specific id. + */ + @Override + public void dashboardDashboardFilters(DashboardDashboardFiltersRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboards/{dashboard_id}/dashboard_filters", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardDashboardFiltersResponse.Builder responseBuilder = DashboardDashboardFiltersResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + DashboardDashboardFiltersStreamResponse.Builder responseBuilder2 = DashboardDashboardFiltersStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a dashboard filter on the dashboard with a specific id. 
+ */ + @Override + public void createDashboardFilter(CreateDashboardFilterRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/dashboard_filters", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateDashboardFilterResponse.Builder responseBuilder = CreateDashboardFilterResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the dashboard elements with a specific id. + */ + @Override + public void dashboardLayoutComponent(DashboardLayoutComponentRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboard_layout_components/{dashboard_layout_component_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardLayoutComponentResponse.Builder responseBuilder = DashboardLayoutComponentResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the dashboard element with a specific id. + */ + @Override + public void updateDashboardLayoutComponent(UpdateDashboardLayoutComponentRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/dashboard_layout_components/{dashboard_layout_component_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateDashboardLayoutComponentResponse.Builder responseBuilder = UpdateDashboardLayoutComponentResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all the dashboard layout components for a dashboard layout with a specific id. 
+ */ + @Override + public void dashboardLayoutDashboardLayoutComponents(DashboardLayoutDashboardLayoutComponentsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboard_layouts/{dashboard_layout_id}/dashboard_layout_components", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardLayoutDashboardLayoutComponentsResponse.Builder responseBuilder = DashboardLayoutDashboardLayoutComponentsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + DashboardLayoutDashboardLayoutComponentsStreamResponse.Builder responseBuilder2 = DashboardLayoutDashboardLayoutComponentsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the dashboard layouts with a specific id. + */ + @Override + public void dashboardLayout(DashboardLayoutRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboard_layouts/{dashboard_layout_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardLayoutResponse.Builder responseBuilder = DashboardLayoutResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the dashboard layout with a specific id. 
+ */ + @Override + public void updateDashboardLayout(UpdateDashboardLayoutRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/dashboard_layouts/{dashboard_layout_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateDashboardLayoutResponse.Builder responseBuilder = UpdateDashboardLayoutResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a dashboard layout with a specific id. + */ + @Override + public void deleteDashboardLayout(DeleteDashboardLayoutRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/dashboard_layouts/{dashboard_layout_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteDashboardLayoutResponse.Builder responseBuilder = DeleteDashboardLayoutResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all the dashboard elements on a dashboard with a specific id. 
+ */ + @Override + public void dashboardDashboardLayouts(DashboardDashboardLayoutsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/dashboards/{dashboard_id}/dashboard_layouts", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DashboardDashboardLayoutsResponse.Builder responseBuilder = DashboardDashboardLayoutsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + DashboardDashboardLayoutsStreamResponse.Builder responseBuilder2 = DashboardDashboardLayoutsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a dashboard layout on the dashboard with a specific id. + */ + @Override + public void createDashboardLayout(CreateDashboardLayoutRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/dashboard_layouts", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateDashboardLayoutResponse.Builder responseBuilder = CreateDashboardLayoutResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Dashboard: Manage Dashboards + + //#region DataAction: Run Data Actions + + /** + * Perform a data action. The data action object can be obtained from query results, and used to perform an arbitrary action. 
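+ *
+ * The request payload is forwarded as JSON to the POST /data_actions REST endpoint, and the
+ * endpoint's JSON response is returned to the caller as a PerformDataActionResponse message.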
+ */ + @Override + public void performDataAction(PerformDataActionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/data_actions", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + PerformDataActionResponse.Builder responseBuilder = PerformDataActionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * For some data actions, the remote server may supply a form requesting further user input. This endpoint takes a data action, asks the remote server to generate a form for it, and returns that form to you for presentation to the user. + */ + @Override + public void fetchRemoteDataActionForm(FetchRemoteDataActionFormRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/data_actions/form", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FetchRemoteDataActionFormResponse.Builder responseBuilder = FetchRemoteDataActionFormResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion DataAction: Run Data Actions + + //#region Datagroup: Manage Datagroups + + /** + * ### Get information about all datagroups. 
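+ *
+ * This is a streaming handler: the JSON array returned by GET /datagroups is merged into the
+ * response builder's result list, and each entry is sent to the caller as a separate
+ * AllDatagroupsStreamResponse message before the stream is completed.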
+ * + */ + @Override + public void allDatagroups(AllDatagroupsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/datagroups", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllDatagroupsResponse.Builder responseBuilder = AllDatagroupsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllDatagroupsStreamResponse.Builder responseBuilder2 = AllDatagroupsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a datagroup. + * + */ + @Override + public void datagroup(DatagroupRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/datagroups/{datagroup_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DatagroupResponse.Builder responseBuilder = DatagroupResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a datagroup using the specified params. 
+ * + */ + @Override + public void updateDatagroup(UpdateDatagroupRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/datagroups/{datagroup_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateDatagroupResponse.Builder responseBuilder = UpdateDatagroupResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Datagroup: Manage Datagroups + + //#region Folder: Manage Folders + + /** + * Search for folders by creator id, parent id, name, etc + */ + @Override + public void searchFolders(SearchFoldersRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/folders/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchFoldersResponse.Builder responseBuilder = SearchFoldersResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SearchFoldersStreamResponse.Builder responseBuilder2 = SearchFoldersStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the folder with a specific id. + */ + @Override + public void folder(FolderRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/folders/{folder_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FolderResponse.Builder responseBuilder = FolderResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the folder with a specific id. 
+ */ + @Override + public void updateFolder(UpdateFolderRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/folders/{folder_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateFolderResponse.Builder responseBuilder = UpdateFolderResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete the folder with a specific id including any children folders. + * **DANGER** this will delete all looks and dashboards in the folder. + * + */ + @Override + public void deleteFolder(DeleteFolderRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/folders/{folder_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteFolderResponse.Builder responseBuilder = DeleteFolderResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all folders. + * + * In API 3.x, this will not return empty personal folders, unless they belong to the calling user. + * In API 4.0+, all personal folders will be returned. 
+ * + * + */ + @Override + public void allFolders(AllFoldersRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/folders", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllFoldersResponse.Builder responseBuilder = AllFoldersResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllFoldersStreamResponse.Builder responseBuilder2 = AllFoldersStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a folder with specified information. + * + * Caller must have permission to edit the parent folder and to create folders, otherwise the request + * returns 404 Not Found. + * + */ + @Override + public void createFolder(CreateFolderRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/folders", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateFolderResponse.Builder responseBuilder = CreateFolderResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the children of a folder. 
+ */ + @Override + public void folderChildren(FolderChildrenRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/folders/{folder_id}/children", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FolderChildrenResponse.Builder responseBuilder = FolderChildrenResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + FolderChildrenStreamResponse.Builder responseBuilder2 = FolderChildrenStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search the children of a folder + */ + @Override + public void folderChildrenSearch(FolderChildrenSearchRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/folders/{folder_id}/children/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FolderChildrenSearchResponse.Builder responseBuilder = FolderChildrenSearchResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + FolderChildrenSearchStreamResponse.Builder responseBuilder2 = FolderChildrenSearchStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the parent of a folder + */ + @Override + public void folderParent(FolderParentRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/folders/{folder_id}/parent", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FolderParentResponse.Builder responseBuilder = FolderParentResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get 
the ancestors of a folder + */ + @Override + public void folderAncestors(FolderAncestorsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/folders/{folder_id}/ancestors", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FolderAncestorsResponse.Builder responseBuilder = FolderAncestorsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + FolderAncestorsStreamResponse.Builder responseBuilder2 = FolderAncestorsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get all looks in a folder. + * In API 3.x, this will return all looks in a folder, including looks in the trash. + * In API 4.0+, all looks in a folder will be returned, excluding looks in the trash. + * + */ + @Override + public void folderLooks(FolderLooksRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/folders/{folder_id}/looks", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FolderLooksResponse.Builder responseBuilder = FolderLooksResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + FolderLooksStreamResponse.Builder responseBuilder2 = FolderLooksStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the dashboards in a folder + */ + @Override + public void folderDashboards(FolderDashboardsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/folders/{folder_id}/dashboards", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FolderDashboardsResponse.Builder responseBuilder = FolderDashboardsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + FolderDashboardsStreamResponse.Builder responseBuilder2 = 
FolderDashboardsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Folder: Manage Folders + + //#region Group: Manage Groups + + /** + * ### Get information about all groups. + * + */ + @Override + public void allGroups(AllGroupsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/groups", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllGroupsResponse.Builder responseBuilder = AllGroupsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllGroupsStreamResponse.Builder responseBuilder2 = AllGroupsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Creates a new group (admin only). + * + */ + @Override + public void createGroup(CreateGroupRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/groups", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateGroupResponse.Builder responseBuilder = CreateGroupResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search groups + * + * Returns all group records that match the given search criteria. + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. 
The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. + * + * + */ + @Override + public void searchGroups(SearchGroupsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/groups/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchGroupsResponse.Builder responseBuilder = SearchGroupsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SearchGroupsStreamResponse.Builder responseBuilder2 = SearchGroupsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search groups include roles + * + * Returns all group records that match the given search criteria, and attaches any associated roles. + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. 
+ * + * + */ + @Override + public void searchGroupsWithRoles(SearchGroupsWithRolesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/groups/search/with_roles", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchGroupsWithRolesResponse.Builder responseBuilder = SearchGroupsWithRolesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SearchGroupsWithRolesStreamResponse.Builder responseBuilder2 = SearchGroupsWithRolesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search groups include hierarchy + * + * Returns all group records that match the given search criteria, and attaches + * associated role_ids and parent group_ids. + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. 
+ * + * + */ + @Override + public void searchGroupsWithHierarchy(SearchGroupsWithHierarchyRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/groups/search/with_hierarchy", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchGroupsWithHierarchyResponse.Builder responseBuilder = SearchGroupsWithHierarchyResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SearchGroupsWithHierarchyStreamResponse.Builder responseBuilder2 = SearchGroupsWithHierarchyStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a group. + * + */ + @Override + public void group(GroupRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/groups/{group_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + GroupResponse.Builder responseBuilder = GroupResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Updates the a group (admin only). + */ + @Override + public void updateGroup(UpdateGroupRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/groups/{group_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateGroupResponse.Builder responseBuilder = UpdateGroupResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Deletes a group (admin only). 
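+ *
+ * Error handling follows the pattern used throughout this service: a non-null status from the
+ * Looker client is surfaced as a gRPC StatusRuntimeException, and protobuf/JSON parsing
+ * failures are reported to the caller as Status.INVALID_ARGUMENT.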
+ * + */ + @Override + public void deleteGroup(DeleteGroupRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/groups/{group_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteGroupResponse.Builder responseBuilder = DeleteGroupResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all the groups in a group + * + */ + @Override + public void allGroupGroups(AllGroupGroupsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/groups/{group_id}/groups", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllGroupGroupsResponse.Builder responseBuilder = AllGroupGroupsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllGroupGroupsStreamResponse.Builder responseBuilder2 = AllGroupGroupsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Adds a new group to a group. + * + */ + @Override + public void addGroupGroup(AddGroupGroupRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/groups/{group_id}/groups", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AddGroupGroupResponse.Builder responseBuilder = AddGroupGroupResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all the users directly included in a group. 
+ * + */ + @Override + public void allGroupUsers(AllGroupUsersRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/groups/{group_id}/users", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllGroupUsersResponse.Builder responseBuilder = AllGroupUsersResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllGroupUsersStreamResponse.Builder responseBuilder2 = AllGroupUsersStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Adds a new user to a group. + * + */ + @Override + public void addGroupUser(AddGroupUserRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/groups/{group_id}/users", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AddGroupUserResponse.Builder responseBuilder = AddGroupUserResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Removes a user from a group. + * + */ + @Override + public void deleteGroupUser(DeleteGroupUserRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/groups/{group_id}/users/{user_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteGroupUserResponse.Builder responseBuilder = DeleteGroupUserResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Removes a group from a group. 
+ * + */ + @Override + public void deleteGroupFromGroup(DeleteGroupFromGroupRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/groups/{group_id}/groups/{deleting_group_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteGroupFromGroupResponse.Builder responseBuilder = DeleteGroupFromGroupResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Set the value of a user attribute for a group. + * + * For information about how user attribute values are calculated, see [Set User Attribute Group Values](#!/UserAttribute/set_user_attribute_group_values). + * + */ + @Override + public void updateUserAttributeGroupValue(UpdateUserAttributeGroupValueRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/groups/{group_id}/attribute_values/{user_attribute_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateUserAttributeGroupValueResponse.Builder responseBuilder = UpdateUserAttributeGroupValueResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Remove a user attribute value from a group. 
+ * + */ + @Override + public void deleteUserAttributeGroupValue(DeleteUserAttributeGroupValueRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/groups/{group_id}/attribute_values/{user_attribute_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserAttributeGroupValueResponse.Builder responseBuilder = DeleteUserAttributeGroupValueResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Group: Manage Groups + + //#region Homepage: Manage Homepage + + /** + * ### Get information about the primary homepage's sections. + * + */ + @Override + public void allPrimaryHomepageSections(AllPrimaryHomepageSectionsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/primary_homepage_sections", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllPrimaryHomepageSectionsResponse.Builder responseBuilder = AllPrimaryHomepageSectionsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllPrimaryHomepageSectionsStreamResponse.Builder responseBuilder2 = AllPrimaryHomepageSectionsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Homepage: Manage Homepage + + //#region Integration: Manage Integrations + + /** + * ### Get information about all Integration Hubs. 
+ * + */ + @Override + public void allIntegrationHubs(AllIntegrationHubsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/integration_hubs", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllIntegrationHubsResponse.Builder responseBuilder = AllIntegrationHubsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllIntegrationHubsStreamResponse.Builder responseBuilder2 = AllIntegrationHubsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a new Integration Hub. + * + * This API is rate limited to prevent it from being used for SSRF attacks + * + */ + @Override + public void createIntegrationHub(CreateIntegrationHubRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/integration_hubs", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateIntegrationHubResponse.Builder responseBuilder = CreateIntegrationHubResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a Integration Hub. + * + */ + @Override + public void integrationHub(IntegrationHubRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/integration_hubs/{integration_hub_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + IntegrationHubResponse.Builder responseBuilder = IntegrationHubResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a Integration Hub definition. 
+ * + * This API is rate limited to prevent it from being used for SSRF attacks + * + */ + @Override + public void updateIntegrationHub(UpdateIntegrationHubRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/integration_hubs/{integration_hub_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateIntegrationHubResponse.Builder responseBuilder = UpdateIntegrationHubResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a Integration Hub. + * + */ + @Override + public void deleteIntegrationHub(DeleteIntegrationHubRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/integration_hubs/{integration_hub_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteIntegrationHubResponse.Builder responseBuilder = DeleteIntegrationHubResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Accepts the legal agreement for a given integration hub. This only works for integration hubs that have legal_agreement_required set to true and legal_agreement_signed set to false. 
+ */ + @Override + public void acceptIntegrationHubLegalAgreement(AcceptIntegrationHubLegalAgreementRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/integration_hubs/{integration_hub_id}/accept_legal_agreement", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AcceptIntegrationHubLegalAgreementResponse.Builder responseBuilder = AcceptIntegrationHubLegalAgreementResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all Integrations. + * + */ + @Override + public void allIntegrations(AllIntegrationsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/integrations", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllIntegrationsResponse.Builder responseBuilder = AllIntegrationsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllIntegrationsStreamResponse.Builder responseBuilder2 = AllIntegrationsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a Integration. + * + */ + @Override + public void integration(IntegrationRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/integrations/{integration_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + IntegrationResponse.Builder responseBuilder = IntegrationResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update parameters on a Integration. 
+ * + */ + @Override + public void updateIntegration(UpdateIntegrationRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/integrations/{integration_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateIntegrationResponse.Builder responseBuilder = UpdateIntegrationResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Returns the Integration form for presentation to the user. + */ + @Override + public void fetchIntegrationForm(FetchIntegrationFormRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/integrations/{integration_id}/form", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FetchIntegrationFormResponse.Builder responseBuilder = FetchIntegrationFormResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Tests the integration to make sure all the settings are working. + */ + @Override + public void testIntegration(TestIntegrationRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/integrations/{integration_id}/test", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + TestIntegrationResponse.Builder responseBuilder = TestIntegrationResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Integration: Manage Integrations + + //#region Look: Run and Manage Looks + + /** + * ### Get information about all active Looks + * + * Returns an array of **abbreviated Look objects** describing all the looks that the caller has access to. Soft-deleted Looks are **not** included. 
+ * + * Get the **full details** of a specific look by id with [look(id)](#!/Look/look) + * + * Find **soft-deleted looks** with [search_looks()](#!/Look/search_looks) + * + */ + @Override + public void allLooks(AllLooksRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/looks", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllLooksResponse.Builder responseBuilder = AllLooksResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllLooksStreamResponse.Builder responseBuilder2 = AllLooksStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a Look + * + * To create a look to display query data, first create the query with [create_query()](#!/Query/create_query) + * then assign the query's id to the `query_id` property in the call to `create_look()`. + * + * To place the look into a particular space, assign the space's id to the `space_id` property + * in the call to `create_look()`. + * + */ + @Override + public void createLook(CreateLookRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/looks", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateLookResponse.Builder responseBuilder = CreateLookResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search Looks + * + * Returns an **array of Look objects** that match the specified search criteria. + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. 
The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. + * + * + * Get a **single look** by id with [look(id)](#!/Look/look) + * + */ + @Override + public void searchLooks(SearchLooksRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/looks/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchLooksResponse.Builder responseBuilder = SearchLooksResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SearchLooksStreamResponse.Builder responseBuilder2 = SearchLooksStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get a Look. + * + * Returns detailed information about a Look and its associated Query. + * + * + */ + @Override + public void look(LookRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/looks/{look_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + LookResponse.Builder responseBuilder = LookResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Modify a Look + * + * Use this function to modify parts of a look. Property values given in a call to `update_look` are + * applied to the existing look, so there's no need to include properties whose values are not changing. + * It's best to specify only the properties you want to change and leave everything else out + * of your `update_look` call. **Look properties marked 'read-only' will be ignored.** + * + * When a user deletes a look in the Looker UI, the look data remains in the database but is + * marked with a deleted flag ("soft-deleted"). Soft-deleted looks can be undeleted (by an admin) + * if the delete was in error. + * + * To soft-delete a look via the API, use [update_look()](#!/Look/update_look) to change the look's `deleted` property to `true`. 
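+ * For example (an illustrative sketch): since only changed properties need to be sent, the body of
+ * such a soft-delete call can be as small as `{"deleted": true}`.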
+ * You can undelete a look by calling `update_look` to change the look's `deleted` property to `false`. + * + * Soft-deleted looks are excluded from the results of [all_looks()](#!/Look/all_looks) and [search_looks()](#!/Look/search_looks), so they + * essentially disappear from view even though they still reside in the db. + * In API 3.1 and later, you can pass `deleted: true` as a parameter to [search_looks()](#!/3.1/Look/search_looks) to list soft-deleted looks. + * + * NOTE: [delete_look()](#!/Look/delete_look) performs a "hard delete" - the look data is removed from the Looker + * database and destroyed. There is no "undo" for `delete_look()`. + * + */ + @Override + public void updateLook(UpdateLookRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/looks/{look_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateLookResponse.Builder responseBuilder = UpdateLookResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Permanently Delete a Look + * + * This operation **permanently** removes a look from the Looker database. + * + * NOTE: There is no "undo" for this kind of delete. + * + * For information about soft-delete (which can be undone) see [update_look()](#!/Look/update_look). + * + */ + @Override + public void deleteLook(DeleteLookRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/looks/{look_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteLookResponse.Builder responseBuilder = DeleteLookResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Run a Look + * + * Runs a given look's query and returns the results in the requested format. 
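+ *
+ * For example (hypothetical id): `/looks/42/run/csv` runs look 42 and returns its results as
+ * comma separated text; the supported `result_format` values are listed in the table below.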
+ * + * Supported formats: + * + * | result_format | Description + * | :-----------: | :--- | + * | json | Plain json + * | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + * | csv | Comma separated values with a header + * | txt | Tab separated values with a header + * | html | Simple html + * | md | Simple markdown + * | xlsx | MS Excel spreadsheet + * | sql | Returns the generated SQL rather than running the query + * | png | A PNG image of the visualization of the query + * | jpg | A JPG image of the visualization of the query + * + * + * + */ + @Override + public void runLook(RunLookRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/looks/{look_id}/run/{result_format}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RunLookResponse.Builder responseBuilder = RunLookResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Look: Run and Manage Looks + + //#region LookmlModel: Manage LookML Models + + /** + * ### Get information about all lookml models. + * + */ + @Override + public void allLookmlModels(AllLookmlModelsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/lookml_models", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllLookmlModelsResponse.Builder responseBuilder = AllLookmlModelsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllLookmlModelsStreamResponse.Builder responseBuilder2 = AllLookmlModelsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a lookml model using the specified configuration. 
+ * + */ + @Override + public void createLookmlModel(CreateLookmlModelRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/lookml_models", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateLookmlModelResponse.Builder responseBuilder = CreateLookmlModelResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a lookml model. + * + */ + @Override + public void lookmlModel(LookmlModelRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/lookml_models/{lookml_model_name}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + LookmlModelResponse.Builder responseBuilder = LookmlModelResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a lookml model using the specified configuration. + * + */ + @Override + public void updateLookmlModel(UpdateLookmlModelRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/lookml_models/{lookml_model_name}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateLookmlModelResponse.Builder responseBuilder = UpdateLookmlModelResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a lookml model. 
+ * + */ + @Override + public void deleteLookmlModel(DeleteLookmlModelRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/lookml_models/{lookml_model_name}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteLookmlModelResponse.Builder responseBuilder = DeleteLookmlModelResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a lookml model explore. + * + */ + @Override + public void lookmlModelExplore(LookmlModelExploreRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/lookml_models/{lookml_model_name}/explores/{explore_name}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + LookmlModelExploreResponse.Builder responseBuilder = LookmlModelExploreResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion LookmlModel: Manage LookML Models + + //#region Metadata: Connection Metadata Features + + /** + * ### Field name suggestions for a model and view + * + * + */ + @Override + public void modelFieldnameSuggestions(ModelFieldnameSuggestionsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/models/{model_name}/views/{view_name}/fields/{field_name}/suggestions", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ModelFieldnameSuggestionsResponse.Builder responseBuilder = ModelFieldnameSuggestionsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### List databases available to this connection + * + * Certain dialects can support multiple databases per single connection. 
+ * If this connection supports multiple databases, the database names will be returned in an array. + * + * Connections using dialects that do not support multiple databases will return an empty array. + * + * **Note**: [Connection Features](#!/Metadata/connection_features) can be used to determine if a connection supports + * multiple databases. + * + */ + @Override + public void connectionDatabases(ConnectionDatabasesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/connections/{connection_name}/databases", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ConnectionDatabasesResponse.Builder responseBuilder = ConnectionDatabasesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + ConnectionDatabasesStreamResponse.Builder responseBuilder2 = ConnectionDatabasesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Retrieve metadata features for this connection + * + * Returns a list of feature names with `true` (available) or `false` (not available) + * + * + */ + @Override + public void connectionFeatures(ConnectionFeaturesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/connections/{connection_name}/features", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ConnectionFeaturesResponse.Builder responseBuilder = ConnectionFeaturesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the list of schemas and tables for a connection + * + * + */ + @Override + public void connectionSchemas(ConnectionSchemasRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/connections/{connection_name}/schemas", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ConnectionSchemasResponse.Builder responseBuilder = ConnectionSchemasResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + 
.parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + ConnectionSchemasStreamResponse.Builder responseBuilder2 = ConnectionSchemasStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the list of tables for a schema + * + * For dialects that support multiple databases, optionally identify which to use. If not provided, the default + * database for the connection will be used. + * + * For dialects that do **not** support multiple databases, **do not use** the database parameter + * + */ + @Override + public void connectionTables(ConnectionTablesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/connections/{connection_name}/tables", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ConnectionTablesResponse.Builder responseBuilder = ConnectionTablesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + ConnectionTablesStreamResponse.Builder responseBuilder2 = ConnectionTablesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the columns (and therefore also the tables) in a specific schema + * + * + */ + @Override + public void connectionColumns(ConnectionColumnsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/connections/{connection_name}/columns", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ConnectionColumnsResponse.Builder responseBuilder = ConnectionColumnsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + ConnectionColumnsStreamResponse.Builder responseBuilder2 = ConnectionColumnsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search a connection for columns matching the specified name + * + * **Note**: `column_name` must be a valid column name. 
It is not a search pattern. + * + */ + @Override + public void connectionSearchColumns(ConnectionSearchColumnsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/connections/{connection_name}/search_columns", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ConnectionSearchColumnsResponse.Builder responseBuilder = ConnectionSearchColumnsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + ConnectionSearchColumnsStreamResponse.Builder responseBuilder2 = ConnectionSearchColumnsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Connection cost estimating + * + * Assign a `sql` statement to the body of the request. e.g., for Ruby, `{sql: 'select * from users'}` + * + * **Note**: If the connection's dialect has no support for cost estimates, an error will be returned + * + */ + @Override + public void connectionCostEstimate(ConnectionCostEstimateRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/connections/{connection_name}/cost_estimate", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ConnectionCostEstimateResponse.Builder responseBuilder = ConnectionCostEstimateResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Metadata: Connection Metadata Features + + //#region Project: Manage Projects + + /** + * ### Generate Lockfile for All LookML Dependencies + * + * Git must have been configured, must be in dev mode and deploy permission required + * + * Install_all is a two step process + * 1. For each remote_dependency in a project the dependency manager will resolve any ambiguous ref. + * 2. The project will then write out a lockfile including each remote_dependency with its resolved ref. 
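+ *
+ * For example (hypothetical scenario): a remote_dependency whose manifest `ref` names the branch
+ * "master" is ambiguous because the branch tip moves; step 1 resolves it to the commit SHA currently
+ * at that tip, and step 2 records that SHA for the dependency in the lockfile.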
+ * + * + */ + @Override + public void lockAll(LockAllRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/projects/{project_id}/manifest/lock_all", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + LockAllResponse.Builder responseBuilder = LockAllResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get All Git Branches + * + * Returns a list of git branches in the project repository + * + */ + @Override + public void allGitBranches(AllGitBranchesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/git_branches", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllGitBranchesResponse.Builder responseBuilder = AllGitBranchesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllGitBranchesStreamResponse.Builder responseBuilder2 = AllGitBranchesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the Current Git Branch + * + * Returns the git branch currently checked out in the given project repository + * + */ + @Override + public void gitBranch(GitBranchRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/git_branch", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + GitBranchResponse.Builder responseBuilder = GitBranchResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Checkout and/or reset --hard an existing Git Branch + * + * Only allowed in development mode + * - Call 
`update_session` to select the 'dev' workspace. + * + * Checkout an existing branch if name field is different from the name of the currently checked out branch. + * + * Optionally specify a branch name, tag name or commit SHA to which the branch should be reset. + * **DANGER** hard reset will be force pushed to the remote. Unsaved changes and commits may be permanently lost. + * + * + */ + @Override + public void updateGitBranch(UpdateGitBranchRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/projects/{project_id}/git_branch", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateGitBranchResponse.Builder responseBuilder = UpdateGitBranchResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create and Checkout a Git Branch + * + * Creates and checks out a new branch in the given project repository + * Only allowed in development mode + * - Call `update_session` to select the 'dev' workspace. + * + * Optionally specify a branch name, tag name or commit SHA as the start point in the ref field. + * If no ref is specified, HEAD of the current branch will be used as the start point for the new branch. 
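+   *
+   * A minimal request sketch, assuming the generated message exposes the fields named
+   * above (`name`, `ref`) directly; the setter names are assumptions and the fields may
+   * instead be nested under a body message:
+   *
+   * ```java
+   * CreateGitBranchRequest request = CreateGitBranchRequest.newBuilder()
+   *     .setProjectId("my_project")      // {project_id} path parameter
+   *     .setName("feature/my-branch")    // new branch name
+   *     .setRef("origin/master")         // optional start point: branch, tag, or commit SHA
+   *     .build();
+   * ```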
+ * + * + */ + @Override + public void createGitBranch(CreateGitBranchRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/projects/{project_id}/git_branch", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateGitBranchResponse.Builder responseBuilder = CreateGitBranchResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the specified Git Branch + * + * Returns the git branch specified in branch_name path param if it exists in the given project repository + * + */ + @Override + public void findGitBranch(FindGitBranchRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/git_branch/{branch_name}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + FindGitBranchResponse.Builder responseBuilder = FindGitBranchResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete the specified Git Branch + * + * Delete git branch specified in branch_name path param from local and remote of specified project repository + * + */ + @Override + public void deleteGitBranch(DeleteGitBranchRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/projects/{project_id}/git_branch/{branch_name}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteGitBranchResponse.Builder responseBuilder = DeleteGitBranchResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Deploy a Remote Branch or Ref to Production + * + * Git must have been configured and deploy permission required. 
+ * + * Deploy is a one/two step process + * 1. If this is the first deploy of this project, create the production project with git repository. + * 2. Pull the branch or ref into the production project. + * + * Can only specify either a branch or a ref. + * + * + */ + @Override + public void deployRefToProduction(DeployRefToProductionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/projects/{project_id}/deploy_ref_to_production", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeployRefToProductionResponse.Builder responseBuilder = DeployRefToProductionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Deploy LookML from this Development Mode Project to Production + * + * Git must have been configured, must be in dev mode and deploy permission required + * + * Deploy is a two / three step process: + * + * 1. Push commits in current branch of dev mode project to the production branch (origin/master). + * Note a. This step is skipped in read-only projects. + * Note b. If this step is unsuccessful for any reason (e.g. rejected non-fastforward because production branch has + * commits not in current branch), subsequent steps will be skipped. + * 2. If this is the first deploy of this project, create the production project with git repository. + * 3. Pull the production branch into the production project. + * + * + */ + @Override + public void deployToProduction(DeployToProductionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/projects/{project_id}/deploy_to_production", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeployToProductionResponse.Builder responseBuilder = DeployToProductionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Reset a project to the revision of the project that is in production. + * + * **DANGER** this will delete any changes that have not been pushed to a remote repository. 
+ * + */ + @Override + public void resetProjectToProduction(ResetProjectToProductionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/projects/{project_id}/reset_to_production", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ResetProjectToProductionResponse.Builder responseBuilder = ResetProjectToProductionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Reset a project development branch to the revision of the project that is on the remote. + * + * **DANGER** this will delete any changes that have not been pushed to a remote repository. + * + */ + @Override + public void resetProjectToRemote(ResetProjectToRemoteRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/projects/{project_id}/reset_to_remote", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ResetProjectToRemoteResponse.Builder responseBuilder = ResetProjectToRemoteResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get All Projects + * + * Returns all projects visible to the current user + * + */ + @Override + public void allProjects(AllProjectsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllProjectsResponse.Builder responseBuilder = AllProjectsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllProjectsStreamResponse.Builder responseBuilder2 = AllProjectsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + 
responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create A Project + * + * dev mode required. + * - Call `update_session` to select the 'dev' workspace. + * + * `name` is required. + * `git_remote_url` is not allowed. To configure Git for the newly created project, follow the instructions in `update_project`. + * + * + */ + @Override + public void createProject(CreateProjectRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/projects", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateProjectResponse.Builder responseBuilder = CreateProjectResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get A Project + * + * Returns the project with the given project id + * + */ + @Override + public void project(ProjectRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ProjectResponse.Builder responseBuilder = ProjectResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update Project Configuration + * + * Apply changes to a project's configuration. + * + * + * #### Configuring Git for a Project + * + * To set up a Looker project with a remote git repository, follow these steps: + * + * 1. Call `update_session` to select the 'dev' workspace. + * 1. Call `create_git_deploy_key` to create a new deploy key for the project + * 1. Copy the deploy key text into the remote git repository's ssh key configuration + * 1. Call `update_project` to set project's `git_remote_url` ()and `git_service_name`, if necessary). + * + * When you modify a project's `git_remote_url`, Looker connects to the remote repository to fetch + * metadata. The remote git repository MUST be configured with the Looker-generated deploy + * key for this project prior to setting the project's `git_remote_url`. + * + * To set up a Looker project with a git repository residing on the Looker server (a 'bare' git repo): + * + * 1. Call `update_session` to select the 'dev' workspace. + * 1. Call `update_project` setting `git_remote_url` to null and `git_service_name` to "bare". 
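+   *
+   * A minimal sketch of the remote-repository steps above expressed as proxy calls; the
+   * `stub` variable and the setter names are assumptions, and the update fields may be
+   * nested under a body message in the generated request type:
+   *
+   * ```java
+   * CreateGitDeployKeyResponse deployKey = stub.createGitDeployKey(
+   *     CreateGitDeployKeyRequest.newBuilder().setProjectId("my_project").build());
+   * // ...copy the returned public key into the remote repository's deploy keys, then:
+   * stub.updateProject(
+   *     UpdateProjectRequest.newBuilder()
+   *         .setProjectId("my_project")
+   *         .setGitRemoteUrl("git@github.com:acme/my_project.git")
+   *         .build());
+   * ```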
+ * + * + */ + @Override + public void updateProject(UpdateProjectRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/projects/{project_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateProjectResponse.Builder responseBuilder = UpdateProjectResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get A Projects Manifest object + * + * Returns the project with the given project id + * + */ + @Override + public void manifest(ManifestRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/manifest", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ManifestResponse.Builder responseBuilder = ManifestResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Git Deploy Key + * + * Returns the ssh public key previously created for a project's git repository. + * + */ + @Override + public void gitDeployKey(GitDeployKeyRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/git/deploy_key", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + GitDeployKeyResponse.Builder responseBuilder = GitDeployKeyResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create Git Deploy Key + * + * Create a public/private key pair for authenticating ssh git requests from Looker to a remote git repository + * for a particular Looker project. + * + * Returns the public key of the generated ssh key pair. 
+ * + * Copy this public key to your remote git repository's ssh keys configuration so that the remote git service can + * validate and accept git requests from the Looker server. + * + */ + @Override + public void createGitDeployKey(CreateGitDeployKeyRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/projects/{project_id}/git/deploy_key", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateGitDeployKeyResponse.Builder responseBuilder = CreateGitDeployKeyResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Cached Project Validation Results + * + * Returns the cached results of a previous project validation calculation, if any. + * Returns http status 204 No Content if no validation results exist. + * + * Validating the content of all the files in a project can be computationally intensive + * for large projects. Use this API to simply fetch the results of the most recent + * project validation rather than revalidating the entire project from scratch. + * + * A value of `"stale": true` in the response indicates that the project has changed since + * the cached validation results were computed. The cached validation results may no longer + * reflect the current state of the project. + * + */ + @Override + public void projectValidationResults(ProjectValidationResultsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/validate", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ProjectValidationResultsResponse.Builder responseBuilder = ProjectValidationResultsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Validate Project + * + * Performs lint validation of all lookml files in the project. + * Returns a list of errors found, if any. + * + * Validating the content of all the files in a project can be computationally intensive + * for large projects. For best performance, call `validate_project(project_id)` only + * when you really want to recompute project validation. 
To quickly display the results of + * the most recent project validation (without recomputing), use `project_validation_results(project_id)` + * + */ + @Override + public void validateProject(ValidateProjectRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/projects/{project_id}/validate", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ValidateProjectResponse.Builder responseBuilder = ValidateProjectResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Project Workspace + * + * Returns information about the state of the project files in the currently selected workspace + * + */ + @Override + public void projectWorkspace(ProjectWorkspaceRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/current_workspace", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ProjectWorkspaceResponse.Builder responseBuilder = ProjectWorkspaceResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get All Project Files + * + * Returns a list of the files in the project + * + */ + @Override + public void allProjectFiles(AllProjectFilesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/files", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllProjectFilesResponse.Builder responseBuilder = AllProjectFilesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllProjectFilesStreamResponse.Builder responseBuilder2 = AllProjectFilesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + 
responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Project File Info + * + * Returns information about a file in the project + * + */ + @Override + public void projectFile(ProjectFileRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/files/file", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ProjectFileResponse.Builder responseBuilder = ProjectFileResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get All Git Connection Tests + * + * dev mode required. + * - Call `update_session` to select the 'dev' workspace. + * + * Returns a list of tests which can be run against a project's (or the dependency project for the provided remote_url) git connection. Call [Run Git Connection Test](#!/Project/run_git_connection_test) to execute each test in sequence. + * + * Tests are ordered by increasing specificity. Tests should be run in the order returned because later tests require functionality tested by tests earlier in the test list. + * + * For example, a late-stage test for write access is meaningless if connecting to the git server (an early test) is failing. + * + */ + @Override + public void allGitConnectionTests(AllGitConnectionTestsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/git_connection_tests", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllGitConnectionTestsResponse.Builder responseBuilder = AllGitConnectionTestsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllGitConnectionTestsStreamResponse.Builder responseBuilder2 = AllGitConnectionTestsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Run a git connection test + * + * Run the named test on the git service used by this project (or the dependency project for the provided remote_url) and return the result. This + * is intended to help debug git connections when things do not work properly, to give + * more helpful information about why a git url is not working with Looker. 
+ * + * Tests should be run in the order they are returned by [Get All Git Connection Tests](#!/Project/all_git_connection_tests). + * + */ + @Override + public void runGitConnectionTest(RunGitConnectionTestRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/git_connection_tests/{test_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RunGitConnectionTestResponse.Builder responseBuilder = RunGitConnectionTestResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get All LookML Tests + * + * Returns a list of tests which can be run to validate a project's LookML code and/or the underlying data, + * optionally filtered by the file id. + * Call [Run LookML Test](#!/Project/run_lookml_test) to execute tests. + * + */ + @Override + public void allLookmlTests(AllLookmlTestsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/lookml_tests", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllLookmlTestsResponse.Builder responseBuilder = AllLookmlTestsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllLookmlTestsStreamResponse.Builder responseBuilder2 = AllLookmlTestsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Run LookML Tests + * + * Runs all tests in the project, optionally filtered by file, test, and/or model. 
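+   *
+   * Results are streamed back one test result per message. A client-side sketch,
+   * assuming a generated blocking stub (the `stub` and `request` variables are not
+   * defined in this file):
+   *
+   * ```java
+   * java.util.Iterator<RunLookmlTestStreamResponse> results = stub.runLookmlTest(request);
+   * while (results.hasNext()) {
+   *   RunLookmlTestStreamResponse entry = results.next();
+   *   // entry.getResult() holds a single LookML test result
+   * }
+   * ```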
+ * + */ + @Override + public void runLookmlTest(RunLookmlTestRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{project_id}/lookml_tests/run", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RunLookmlTestResponse.Builder responseBuilder = RunLookmlTestResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + RunLookmlTestStreamResponse.Builder responseBuilder2 = RunLookmlTestStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Configure Repository Credential for a remote dependency + * + * Admin required. + * + * `root_project_id` is required. + * `credential_id` is required. + * + * + */ + @Override + public void updateRepositoryCredential(UpdateRepositoryCredentialRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/projects/{root_project_id}/credential/{credential_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateRepositoryCredentialResponse.Builder responseBuilder = UpdateRepositoryCredentialResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Repository Credential for a remote dependency + * + * Admin required. + * + * `root_project_id` is required. + * `credential_id` is required. 
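+   *
+   * A minimal request sketch; both identifiers come from the endpoint path and the
+   * setter names are assumptions:
+   *
+   * ```java
+   * DeleteRepositoryCredentialRequest request = DeleteRepositoryCredentialRequest.newBuilder()
+   *     .setRootProjectId("my_root_project")
+   *     .setCredentialId("my_credential_id")
+   *     .build();
+   * ```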
+ * + */ + @Override + public void deleteRepositoryCredential(DeleteRepositoryCredentialRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/projects/{root_project_id}/credential/{credential_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteRepositoryCredentialResponse.Builder responseBuilder = DeleteRepositoryCredentialResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get all Repository Credentials for a project + * + * `root_project_id` is required. + * + */ + @Override + public void getAllRepositoryCredentials(GetAllRepositoryCredentialsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/projects/{root_project_id}/credentials", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + GetAllRepositoryCredentialsResponse.Builder responseBuilder = GetAllRepositoryCredentialsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + GetAllRepositoryCredentialsStreamResponse.Builder responseBuilder2 = GetAllRepositoryCredentialsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Project: Manage Projects + + //#region Query: Run and Manage Queries + + /** + * ### Create an async query task + * + * Creates a query task (job) to run a previously created query asynchronously. Returns a Query Task ID. + * + * Use [query_task(query_task_id)](#!/Query/query_task) to check the execution status of the query task. + * After the query task status reaches "Complete", use [query_task_results(query_task_id)](#!/Query/query_task_results) to fetch the results of the query. 
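+   *
+   * A sketch of that flow through this proxy; the `stub` and `createQueryTaskRequest`
+   * variables and the getter/setter names are assumptions for illustration — only the
+   * request and response types appear in this file:
+   *
+   * ```java
+   * CreateQueryTaskResponse task = stub.createQueryTask(createQueryTaskRequest);
+   * // poll until the task reports "Complete" (add a delay between polls)
+   * QueryTaskResponse status = stub.queryTask(
+   *     QueryTaskRequest.newBuilder().setQueryTaskId(task.getId()).build());
+   * if ("complete".equalsIgnoreCase(status.getStatus())) {
+   *   QueryTaskResultsResponse results = stub.queryTaskResults(
+   *       QueryTaskResultsRequest.newBuilder().setQueryTaskId(task.getId()).build());
+   * }
+   * ```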
+ * + */ + @Override + public void createQueryTask(CreateQueryTaskRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/query_tasks", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateQueryTaskResponse.Builder responseBuilder = CreateQueryTaskResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Fetch results of multiple async queries + * + * Returns the results of multiple async queries in one request. + * + * For Query Tasks that are not completed, the response will include the execution status of the Query Task but will not include query results. + * Query Tasks whose results have expired will have a status of 'expired'. + * If the user making the API request does not have sufficient privileges to view a Query Task result, the result will have a status of 'missing' + * + */ + @Override + public void queryTaskMultiResults(QueryTaskMultiResultsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/query_tasks/multi_results", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + QueryTaskMultiResultsResponse.Builder responseBuilder = QueryTaskMultiResultsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Query Task details + * + * Use this function to check the status of an async query task. After the status + * reaches "Complete", you can call [query_task_results(query_task_id)](#!/Query/query_task_results) to + * retrieve the results of the query. + * + * Use [create_query_task()](#!/Query/create_query_task) to create an async query task. 
+ * + */ + @Override + public void queryTask(QueryTaskRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/query_tasks/{query_task_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + QueryTaskResponse.Builder responseBuilder = QueryTaskResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Async Query Results + * + * Returns the results of an async query task if the query has completed. + * + * If the query task is still running or waiting to run, this function returns 204 No Content. + * + * If the query task ID is invalid or the cached results of the query task have expired, this function returns 404 Not Found. + * + * Use [query_task(query_task_id)](#!/Query/query_task) to check the execution status of the query task + * Call query_task_results only after the query task status reaches "Complete". + * + * You can also use [query_task_multi_results()](#!/Query/query_task_multi_results) retrieve the + * results of multiple async query tasks at the same time. + * + * #### SQL Error Handling: + * If the query fails due to a SQL db error, how this is communicated depends on the result_format you requested in `create_query_task()`. + * + * For `json_detail` result_format: `query_task_results()` will respond with HTTP status '200 OK' and db SQL error info + * will be in the `errors` property of the response object. The 'data' property will be empty. + * + * For all other result formats: `query_task_results()` will respond with HTTP status `400 Bad Request` and some db SQL error info + * will be in the message of the 400 error response, but not as detailed as expressed in `json_detail.errors`. + * These data formats can only carry row data, and error info is not row data. + * + */ + @Override + public void queryTaskResults(QueryTaskResultsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/query_tasks/{query_task_id}/results", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + QueryTaskResultsResponse.Builder responseBuilder = QueryTaskResultsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get a previously created query by id. 
+ * + * A Looker query object includes the various parameters that define a database query that has been run or + * could be run in the future. These parameters include: model, view, fields, filters, pivots, etc. + * Query *results* are not part of the query object. + * + * Query objects are unique and immutable. Query objects are created automatically in Looker as users explore data. + * Looker does not delete them; they become part of the query history. When asked to create a query for + * any given set of parameters, Looker will first try to find an existing query object with matching + * parameters and will only create a new object when an appropriate object can not be found. + * + * This 'get' method is used to get the details about a query for a given id. See the other methods here + * to 'create' and 'run' queries. + * + * Note that some fields like 'filter_config' and 'vis_config' etc are specific to how the Looker UI + * builds queries and visualizations and are not generally useful for API use. They are not required when + * creating new queries and can usually just be ignored. + * + * + */ + @Override + public void query(QueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/queries/{query_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + QueryResponse.Builder responseBuilder = QueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the query for a given query slug. + * + * This returns the query for the 'slug' in a query share URL. + * + * The 'slug' is a randomly chosen short string that is used as an alternative to the query's id value + * for use in URLs etc. This method exists as a convenience to help you use the API to 'find' queries that + * have been created using the Looker UI. + * + * You can use the Looker explore page to build a query and then choose the 'Share' option to + * show the share url for the query. Share urls generally look something like 'https://looker.yourcompany/x/vwGSbfc'. + * The trailing 'vwGSbfc' is the share slug. You can pass that string to this api method to get details about the query. + * Those details include the 'id' that you can use to run the query. Or, you can copy the query body + * (perhaps with your own modification) and use that as the basis to make/run new queries. + * + * This will also work with slugs from Looker explore urls like + * 'https://looker.yourcompany/explore/ecommerce/orders?qid=aogBgL6o3cKK1jN3RoZl5s'. In this case + * 'aogBgL6o3cKK1jN3RoZl5s' is the slug. 
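+   *
+   * A minimal sketch using the share slug from the example URL above (the `stub`
+   * variable and the getter/setter names are assumptions):
+   *
+   * ```java
+   * QueryForSlugResponse query = stub.queryForSlug(
+   *     QueryForSlugRequest.newBuilder().setSlug("vwGSbfc").build());
+   * // query.getId() is the query id accepted by runQuery / run_query
+   * ```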
+ * + */ + @Override + public void queryForSlug(QueryForSlugRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/queries/slug/{slug}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + QueryForSlugResponse.Builder responseBuilder = QueryForSlugResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a query. + * + * This allows you to create a new query that you can later run. Looker queries are immutable once created + * and are not deleted. If you create a query that is exactly like an existing query then the existing query + * will be returned and no new query will be created. Whether a new query is created or not, you can use + * the 'id' in the returned query with the 'run' method. + * + * The query parameters are passed as json in the body of the request. + * + * + */ + @Override + public void createQuery(CreateQueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/queries", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateQueryResponse.Builder responseBuilder = CreateQueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Run a saved query. + * + * This runs a previously saved query. You can use this on a query that was generated in the Looker UI + * or one that you have explicitly created using the API. You can also use a query 'id' from a saved 'Look'. + * + * The 'result_format' parameter specifies the desired structure and format of the response. 
+ * + * Supported formats: + * + * | result_format | Description + * | :-----------: | :--- | + * | json | Plain json + * | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + * | csv | Comma separated values with a header + * | txt | Tab separated values with a header + * | html | Simple html + * | md | Simple markdown + * | xlsx | MS Excel spreadsheet + * | sql | Returns the generated SQL rather than running the query + * | png | A PNG image of the visualization of the query + * | jpg | A JPG image of the visualization of the query + * + * + * + */ + @Override + public void runQuery(RunQueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/queries/{query_id}/run/{result_format}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RunQueryResponse.Builder responseBuilder = RunQueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Run the query that is specified inline in the posted body. + * + * This allows running a query as defined in json in the posted body. This combines + * the two actions of posting & running a query into one step. + * + * Here is an example body in json: + * ``` + * { + * "model":"thelook", + * "view":"inventory_items", + * "fields":["category.name","inventory_items.days_in_inventory_tier","products.count"], + * "filters":{"category.name":"socks"}, + * "sorts":["products.count desc 0"], + * "limit":"500", + * "query_timezone":"America/Los_Angeles" + * } + * ``` + * + * When using the Ruby SDK this would be passed as a Ruby hash like: + * ``` + * { + * :model=>"thelook", + * :view=>"inventory_items", + * :fields=> + * ["category.name", + * "inventory_items.days_in_inventory_tier", + * "products.count"], + * :filters=>{:"category.name"=>"socks"}, + * :sorts=>["products.count desc 0"], + * :limit=>"500", + * :query_timezone=>"America/Los_Angeles", + * } + * ``` + * + * This will return the result of running the query in the format specified by the 'result_format' parameter. 
+ * + * Supported formats: + * + * | result_format | Description + * | :-----------: | :--- | + * | json | Plain json + * | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + * | csv | Comma separated values with a header + * | txt | Tab separated values with a header + * | html | Simple html + * | md | Simple markdown + * | xlsx | MS Excel spreadsheet + * | sql | Returns the generated SQL rather than running the query + * | png | A PNG image of the visualization of the query + * | jpg | A JPG image of the visualization of the query + * + * + * + */ + @Override + public void runInlineQuery(RunInlineQueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/queries/run/{result_format}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RunInlineQueryResponse.Builder responseBuilder = RunInlineQueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Run an URL encoded query. + * + * This requires the caller to encode the specifiers for the query into the URL query part using + * Looker-specific syntax as explained below. + * + * Generally, you would want to use one of the methods that takes the parameters as json in the POST body + * for creating and/or running queries. This method exists for cases where one really needs to encode the + * parameters into the URL of a single 'GET' request. This matches the way that the Looker UI formats + * 'explore' URLs etc. + * + * The parameters here are very similar to the json body formatting except that the filter syntax is + * tricky. Unfortunately, this format makes this method not currently callable via the 'Try it out!' button + * in this documentation page. But, this is callable when creating URLs manually or when using the Looker SDK. + * + * Here is an example inline query URL: + * + * ``` + * https://looker.mycompany.com:19999/api/3.0/queries/models/thelook/views/inventory_items/run/json?fields=category.name,inventory_items.days_in_inventory_tier,products.count&f[category.name]=socks&sorts=products.count+desc+0&limit=500&query_timezone=America/Los_Angeles + * ``` + * + * When invoking this endpoint with the Ruby SDK, pass the query parameter parts as a hash. The hash to match the above would look like: + * + * ```ruby + * query_params = + * { + * :fields => "category.name,inventory_items.days_in_inventory_tier,products.count", + * :"f[category.name]" => "socks", + * :sorts => "products.count desc 0", + * :limit => "500", + * :query_timezone => "America/Los_Angeles" + * } + * response = ruby_sdk.run_url_encoded_query('thelook','inventory_items','json', query_params) + * + * ``` + * + * Again, it is generally easier to use the variant of this method that passes the full query in the POST body. + * This method is available for cases where other alternatives won't fit the need. 
+ * + * Supported formats: + * + * | result_format | Description + * | :-----------: | :--- | + * | json | Plain json + * | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + * | csv | Comma separated values with a header + * | txt | Tab separated values with a header + * | html | Simple html + * | md | Simple markdown + * | xlsx | MS Excel spreadsheet + * | sql | Returns the generated SQL rather than running the query + * | png | A PNG image of the visualization of the query + * | jpg | A JPG image of the visualization of the query + * + * + * + */ + @Override + public void runUrlEncodedQuery(RunUrlEncodedQueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/queries/models/{model_name}/views/{view_name}/run/{result_format}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RunUrlEncodedQueryResponse.Builder responseBuilder = RunUrlEncodedQueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Merge Query + * + * Returns a merge query object given its id. + * + */ + @Override + public void mergeQuery(MergeQueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/merge_queries/{merge_query_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + MergeQueryResponse.Builder responseBuilder = MergeQueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create Merge Query + * + * Creates a new merge query object. + * + * A merge query takes the results of one or more queries and combines (merges) the results + * according to field mapping definitions. The result is similar to a SQL left outer join. + * + * A merge query can merge results of queries from different SQL databases. + * + * The order that queries are defined in the source_queries array property is significant. The + * first query in the array defines the primary key into which the results of subsequent + * queries will be merged. 
+ * + * Like model/view query objects, merge queries are immutable and have structural identity - if + * you make a request to create a new merge query that is identical to an existing merge query, + * the existing merge query will be returned instead of creating a duplicate. Conversely, any + * change to the contents of a merge query will produce a new object with a new id. + * + */ + @Override + public void createMergeQuery(CreateMergeQueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/merge_queries", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateMergeQueryResponse.Builder responseBuilder = CreateMergeQueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Get information about all running queries. + * + */ + @Override + public void allRunningQueries(AllRunningQueriesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/running_queries", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllRunningQueriesResponse.Builder responseBuilder = AllRunningQueriesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllRunningQueriesStreamResponse.Builder responseBuilder2 = AllRunningQueriesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Kill a query with a specific query_task_id. 
+ * + */ + @Override + public void killQuery(KillQueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/running_queries/{query_task_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + KillQueryResponse.Builder responseBuilder = KillQueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Get a SQL Runner query. + */ + @Override + public void sqlQuery(SqlQueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/sql_queries/{slug}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SqlQueryResponse.Builder responseBuilder = SqlQueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a SQL Runner Query + * + * Either the `connection_name` or `model_name` parameter MUST be provided. + * + */ + @Override + public void createSqlQuery(CreateSqlQueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/sql_queries", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateSqlQueryResponse.Builder responseBuilder = CreateSqlQueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * Execute a SQL Runner query in a given result_format. 
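+ *
+ * The handler below follows the same JsonFormat round trip used by every method in this proxy.
+ * A minimal, self-contained sketch of that pattern using the protobuf well-known Struct type
+ * (illustrative only; it is not part of the generated code):
+ *
+ * ```java
+ * import com.google.protobuf.InvalidProtocolBufferException;
+ * import com.google.protobuf.Struct;
+ * import com.google.protobuf.Value;
+ * import com.google.protobuf.util.JsonFormat;
+ *
+ * public class JsonRoundTrip {
+ *   public static void main(String[] args) throws InvalidProtocolBufferException {
+ *     // Print the request with snake_case field names preserved, as the handlers do
+ *     // before forwarding the JSON to the Looker REST API.
+ *     Struct request = Struct.newBuilder()
+ *         .putFields("result_format", Value.newBuilder().setStringValue("json").build())
+ *         .build();
+ *     String inputJson = JsonFormat.printer().preservingProtoFieldNames().print(request);
+ *
+ *     // Merge the JSON response body back into a builder, ignoring unknown fields,
+ *     // as the handlers do with lookerResponse.getJsonResponse().
+ *     Struct.Builder responseBuilder = Struct.newBuilder();
+ *     JsonFormat.parser().ignoringUnknownFields().merge(inputJson, responseBuilder);
+ *     System.out.println(responseBuilder.build());
+ *   }
+ * }
+ * ```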
+ */ + @Override + public void runSqlQuery(RunSqlQueryRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/sql_queries/{slug}/run/{result_format}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RunSqlQueryResponse.Builder responseBuilder = RunSqlQueryResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Query: Run and Manage Queries + + //#region RenderTask: Manage Render Tasks + + /** + * ### Create a new task to render a look to an image. + * + * Returns a render task object. + * To check the status of a render task, pass the render_task.id to [Get Render Task](#!/RenderTask/get_render_task). + * Once the render task is complete, you can download the resulting document or image using [Get Render Task Results](#!/RenderTask/get_render_task_results). + * + * + */ + @Override + public void createLookRenderTask(CreateLookRenderTaskRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/render_tasks/looks/{look_id}/{result_format}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateLookRenderTaskResponse.Builder responseBuilder = CreateLookRenderTaskResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a new task to render an existing query to an image. + * + * Returns a render task object. + * To check the status of a render task, pass the render_task.id to [Get Render Task](#!/RenderTask/get_render_task). + * Once the render task is complete, you can download the resulting document or image using [Get Render Task Results](#!/RenderTask/get_render_task_results). 
+ * + * + */ + @Override + public void createQueryRenderTask(CreateQueryRenderTaskRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/render_tasks/queries/{query_id}/{result_format}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateQueryRenderTaskResponse.Builder responseBuilder = CreateQueryRenderTaskResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a new task to render a dashboard to a document or image. + * + * Returns a render task object. + * To check the status of a render task, pass the render_task.id to [Get Render Task](#!/RenderTask/get_render_task). + * Once the render task is complete, you can download the resulting document or image using [Get Render Task Results](#!/RenderTask/get_render_task_results). + * + * + */ + @Override + public void createDashboardRenderTask(CreateDashboardRenderTaskRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/render_tasks/dashboards/{dashboard_id}/{result_format}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateDashboardRenderTaskResponse.Builder responseBuilder = CreateDashboardRenderTaskResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a render task. + * + * Returns a render task object. + * To check the status of a render task, pass the render_task.id to [Get Render Task](#!/RenderTask/get_render_task). + * Once the render task is complete, you can download the resulting document or image using [Get Render Task Results](#!/RenderTask/get_render_task_results). 
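+ *
+ * A hypothetical polling sketch for the render-task flow described above. The stub, the setter,
+ * the status accessor, and the status values are assumptions, not identifiers generated by this
+ * change:
+ *
+ * ```java
+ * // Hypothetical sketch -- stub, setter, and status accessor names are assumed.
+ * RenderTaskResultsResponse waitForRenderTask(String taskId) throws InterruptedException {
+ *   RenderTaskRequest statusRequest =
+ *       RenderTaskRequest.newBuilder().setRenderTaskId(taskId).build();
+ *   RenderTaskResponse task = stub.renderTask(statusRequest);
+ *   // Poll render_task(id) for completion rather than polling render_task_results(id).
+ *   while (!"success".equals(task.getStatus()) && !"failure".equals(task.getStatus())) {
+ *     Thread.sleep(2000);
+ *     task = stub.renderTask(statusRequest);
+ *   }
+ *   return stub.renderTaskResults(
+ *       RenderTaskResultsRequest.newBuilder().setRenderTaskId(taskId).build());
+ * }
+ * ```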
+ * + * + */ + @Override + public void renderTask(RenderTaskRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/render_tasks/{render_task_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RenderTaskResponse.Builder responseBuilder = RenderTaskResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the document or image produced by a completed render task. + * + * Note that the PDF or image result will be a binary blob in the HTTP response, as indicated by the + * Content-Type in the response headers. This may require specialized (or at least different) handling than text + * responses such as JSON. You may need to tell your HTTP client that the response is binary so that it does not + * attempt to parse the binary data as text. + * + * If the render task exists but has not finished rendering the results, the response HTTP status will be + * **202 Accepted**, the response body will be empty, and the response will have a Retry-After header indicating + * that the caller should repeat the request at a later time. + * + * Returns 404 if the render task cannot be found, if the cached result has expired, or if the caller + * does not have permission to view the results. + * + * For detailed information about the status of the render task, use [Render Task](#!/RenderTask/render_task). + * Polling loops waiting for completion of a render task would be better served by polling **render_task(id)** until + * the task status reaches completion (or error) instead of polling **render_task_results(id)** alone. + * + */ + @Override + public void renderTaskResults(RenderTaskResultsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/render_tasks/{render_task_id}/results", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RenderTaskResultsResponse.Builder responseBuilder = RenderTaskResultsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion RenderTask: Manage Render Tasks + + //#region Role: Manage Roles + + /** + * ### Search model sets + * Returns all model set records that match the given search criteria. 
+ * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. + * + * + */ + @Override + public void searchModelSets(SearchModelSetsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/model_sets/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchModelSetsResponse.Builder responseBuilder = SearchModelSetsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SearchModelSetsStreamResponse.Builder responseBuilder2 = SearchModelSetsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the model set with a specific id. + * + */ + @Override + public void modelSet(ModelSetRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/model_sets/{model_set_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ModelSetResponse.Builder responseBuilder = ModelSetResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update information about the model set with a specific id. 
+ * + */ + @Override + public void updateModelSet(UpdateModelSetRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/model_sets/{model_set_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateModelSetResponse.Builder responseBuilder = UpdateModelSetResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete the model set with a specific id. + * + */ + @Override + public void deleteModelSet(DeleteModelSetRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/model_sets/{model_set_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteModelSetResponse.Builder responseBuilder = DeleteModelSetResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all model sets. + * + */ + @Override + public void allModelSets(AllModelSetsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/model_sets", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllModelSetsResponse.Builder responseBuilder = AllModelSetsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllModelSetsStreamResponse.Builder responseBuilder2 = AllModelSetsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a model set with the specified information. Model sets are used by Roles. 
+ * + */ + @Override + public void createModelSet(CreateModelSetRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/model_sets", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateModelSetResponse.Builder responseBuilder = CreateModelSetResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get all supported permissions. + * + */ + @Override + public void allPermissions(AllPermissionsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/permissions", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllPermissionsResponse.Builder responseBuilder = AllPermissionsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllPermissionsStreamResponse.Builder responseBuilder2 = AllPermissionsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search permission sets + * Returns all permission set records that match the given search criteria. + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. 
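+ *
+ * A hypothetical client-side sketch of a wildcard search through this proxy. The stub and the
+ * setter names are assumptions; the streaming shape (one result per row) matches the handler
+ * below:
+ *
+ * ```java
+ * // Hypothetical sketch -- stub and setter names are assumed.
+ * SearchPermissionSetsRequest request = SearchPermissionSetsRequest.newBuilder()
+ *     .setName("dev%")        // SQL LIKE-style wildcard, matched case-insensitively
+ *     .setFilterOr(false)     // combine multiple search params with AND (the default)
+ *     .build();
+ * // The proxy streams one SearchPermissionSetsStreamResponse per matching row,
+ * // so a blocking stub returns an Iterator over the stream.
+ * Iterator<SearchPermissionSetsStreamResponse> rows = stub.searchPermissionSets(request);
+ * rows.forEachRemaining(row -> System.out.println(row.getResult()));
+ * ```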
+ * + * + */ + @Override + public void searchPermissionSets(SearchPermissionSetsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/permission_sets/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchPermissionSetsResponse.Builder responseBuilder = SearchPermissionSetsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SearchPermissionSetsStreamResponse.Builder responseBuilder2 = SearchPermissionSetsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the permission set with a specific id. + * + */ + @Override + public void permissionSet(PermissionSetRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/permission_sets/{permission_set_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + PermissionSetResponse.Builder responseBuilder = PermissionSetResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update information about the permission set with a specific id. + * + */ + @Override + public void updatePermissionSet(UpdatePermissionSetRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/permission_sets/{permission_set_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdatePermissionSetResponse.Builder responseBuilder = UpdatePermissionSetResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete the permission set with a specific id. 
+ * + */ + @Override + public void deletePermissionSet(DeletePermissionSetRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/permission_sets/{permission_set_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeletePermissionSetResponse.Builder responseBuilder = DeletePermissionSetResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all permission sets. + * + */ + @Override + public void allPermissionSets(AllPermissionSetsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/permission_sets", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllPermissionSetsResponse.Builder responseBuilder = AllPermissionSetsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllPermissionSetsStreamResponse.Builder responseBuilder2 = AllPermissionSetsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a permission set with the specified information. Permission sets are used by Roles. + * + */ + @Override + public void createPermissionSet(CreatePermissionSetRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/permission_sets", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreatePermissionSetResponse.Builder responseBuilder = CreatePermissionSetResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all roles. 
+ * + */ + @Override + public void allRoles(AllRolesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/roles", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllRolesResponse.Builder responseBuilder = AllRolesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllRolesStreamResponse.Builder responseBuilder2 = AllRolesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a role with the specified information. + * + */ + @Override + public void createRole(CreateRoleRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/roles", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateRoleResponse.Builder responseBuilder = CreateRoleResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search roles + * + * Returns all role records that match the given search criteria. + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. 
+ * + * + */ + @Override + public void searchRoles(SearchRolesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/roles/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchRolesResponse.Builder responseBuilder = SearchRolesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SearchRolesStreamResponse.Builder responseBuilder2 = SearchRolesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the role with a specific id. + * + */ + @Override + public void role(RoleRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/roles/{role_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RoleResponse.Builder responseBuilder = RoleResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update information about the role with a specific id. + * + */ + @Override + public void updateRole(UpdateRoleRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/roles/{role_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateRoleResponse.Builder responseBuilder = UpdateRoleResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete the role with a specific id. 
+ * + */ + @Override + public void deleteRole(DeleteRoleRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/roles/{role_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteRoleResponse.Builder responseBuilder = DeleteRoleResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all the groups with the role that has a specific id. + * + */ + @Override + public void roleGroups(RoleGroupsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/roles/{role_id}/groups", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RoleGroupsResponse.Builder responseBuilder = RoleGroupsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + RoleGroupsStreamResponse.Builder responseBuilder2 = RoleGroupsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Set all groups for a role, removing all existing group associations from that role. 
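+ *
+ * A hypothetical sketch of the call; the stub, the field names, and the id types are assumptions,
+ * not identifiers generated by this change. The group ids supplied here replace every group
+ * association currently on the role:
+ *
+ * ```java
+ * // Hypothetical sketch -- stub, field names, and id types are assumed.
+ * SetRoleGroupsRequest request = SetRoleGroupsRequest.newBuilder()
+ *     .setRoleId(42L)     // path parameter {role_id}
+ *     .addBody(101L)      // ids of the groups that should hold this role;
+ *     .addBody(102L)      // this list REPLACES all existing group associations
+ *     .build();
+ * Iterator<SetRoleGroupsStreamResponse> groups = stub.setRoleGroups(request);
+ * ```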
+ * + */ + @Override + public void setRoleGroups(SetRoleGroupsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/roles/{role_id}/groups", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SetRoleGroupsResponse.Builder responseBuilder = SetRoleGroupsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SetRoleGroupsStreamResponse.Builder responseBuilder2 = SetRoleGroupsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all the users with the role that has a specific id. + * + */ + @Override + public void roleUsers(RoleUsersRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/roles/{role_id}/users", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + RoleUsersResponse.Builder responseBuilder = RoleUsersResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + RoleUsersStreamResponse.Builder responseBuilder2 = RoleUsersStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Set all the users of the role with a specific id. 
+ * + */ + @Override + public void setRoleUsers(SetRoleUsersRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/roles/{role_id}/users", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SetRoleUsersResponse.Builder responseBuilder = SetRoleUsersResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SetRoleUsersStreamResponse.Builder responseBuilder2 = SetRoleUsersStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Role: Manage Roles + + //#region ScheduledPlan: Manage Scheduled Plans + + /** + * ### Get Scheduled Plans for a Space + * + * Returns scheduled plans owned by the caller for a given space id. + * + */ + @Override + public void scheduledPlansForSpace(ScheduledPlansForSpaceRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/scheduled_plans/space/{space_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ScheduledPlansForSpaceResponse.Builder responseBuilder = ScheduledPlansForSpaceResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + ScheduledPlansForSpaceStreamResponse.Builder responseBuilder2 = ScheduledPlansForSpaceStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Information About a Scheduled Plan + * + * Admins can fetch information about other users' Scheduled Plans. 
+ * + */ + @Override + public void scheduledPlan(ScheduledPlanRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/scheduled_plans/{scheduled_plan_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ScheduledPlanResponse.Builder responseBuilder = ScheduledPlanResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a Scheduled Plan + * + * Admins can update other users' Scheduled Plans. + * + * Note: Any scheduled plan destinations specified in an update will **replace** all scheduled plan destinations + * currently defined for the scheduled plan. + * + * For Example: If a scheduled plan has destinations A, B, and C, and you call update on this scheduled plan + * specifying only B in the destinations, then destinations A and C will be deleted by the update. + * + * Updating a scheduled plan to assign null or an empty array to the scheduled_plan_destinations property is an error, as a scheduled plan must always have at least one destination. + * + * If you omit the scheduled_plan_destinations property from the object passed to update, then the destinations + * defined on the original scheduled plan will remain unchanged. + * + * #### Email Permissions: + * + * For details about permissions required to schedule delivery to email and the safeguards + * Looker offers to protect against sending to unauthorized email destinations, see [Email Domain Whitelist for Scheduled Looks](https://docs.looker.com/r/api/embed-permissions). + * + * + * #### Scheduled Plan Destination Formats + * + * Scheduled plan destinations must specify the data format to produce and send to the destination. + * + * Formats: + * + * | format | Description + * | :-----------: | :--- | + * | json | A JSON object containing a `data` property which contains an array of JSON objects, one per row. No metadata. + * | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + * | inline_json | Same as the JSON format, except that the `data` property is a string containing JSON-escaped row data. Additional properties describe the data operation. This format is primarily used to send data to web hooks so that the web hook doesn't have to re-encode the JSON row data in order to pass it on to its ultimate destination. + * | csv | Comma separated values with a header + * | txt | Tab separated values with a header + * | html | Simple html + * | xlsx | MS Excel spreadsheet + * | wysiwyg_pdf | Dashboard rendered in a tiled layout to produce a PDF document + * | assembled_pdf | Dashboard rendered in a single column layout to produce a PDF document + * | wysiwyg_png | Dashboard rendered in a tiled layout to produce a PNG image + * || + * + * Valid formats vary by destination type and source object. `wysiwyg_pdf` is only valid for dashboards, for example. 
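+ *
+ * A hypothetical sketch of the replace semantics described above; the stub, the setters, and the
+ * body message name are assumptions. Omitting scheduled_plan_destinations leaves the existing
+ * destinations untouched, while supplying any destination list replaces them all:
+ *
+ * ```java
+ * // Hypothetical sketch -- stub, setter, and body message names are assumed.
+ * UpdateScheduledPlanRequest request = UpdateScheduledPlanRequest.newBuilder()
+ *     .setScheduledPlanId(123L)                  // path parameter {scheduled_plan_id}
+ *     .setBody(WriteScheduledPlan.newBuilder()   // change the schedule only;
+ *         .setCrontab("0 7 * * 1-5"))            // destinations are omitted, so they are unchanged
+ *     .build();
+ * UpdateScheduledPlanResponse updated = stub.updateScheduledPlan(request);
+ * ```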
+ * + * + * + */ + @Override + public void updateScheduledPlan(UpdateScheduledPlanRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/scheduled_plans/{scheduled_plan_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateScheduledPlanResponse.Builder responseBuilder = UpdateScheduledPlanResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a Scheduled Plan + * + * Normal users can only delete their own scheduled plans. + * Admins can delete other users' scheduled plans. + * This delete cannot be undone. + * + */ + @Override + public void deleteScheduledPlan(DeleteScheduledPlanRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/scheduled_plans/{scheduled_plan_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteScheduledPlanResponse.Builder responseBuilder = DeleteScheduledPlanResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### List All Scheduled Plans + * + * Returns all scheduled plans which belong to the caller or given user. + * + * If no user_id is provided, this function returns the scheduled plans owned by the caller. + * + * + * To list all schedules for all users, pass `all_users=true`. + * + * + * The caller must have `see_schedules` permission to see other users' scheduled plans. 
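+ *
+ * A hypothetical sketch (stub and setter names are assumptions) of listing every user's
+ * schedules, which requires the see_schedules permission:
+ *
+ * ```java
+ * // Hypothetical sketch -- stub and setter names are assumed.
+ * AllScheduledPlansRequest request = AllScheduledPlansRequest.newBuilder()
+ *     .setAllUsers(true)   // list schedules for all users; requires see_schedules
+ *     .build();
+ * Iterator<AllScheduledPlansStreamResponse> plans = stub.allScheduledPlans(request);
+ * ```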
+ * + * + * + */ + @Override + public void allScheduledPlans(AllScheduledPlansRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/scheduled_plans", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllScheduledPlansResponse.Builder responseBuilder = AllScheduledPlansResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllScheduledPlansStreamResponse.Builder responseBuilder2 = AllScheduledPlansStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a Scheduled Plan + * + * Create a scheduled plan to render a Look or Dashboard on a recurring schedule. + * + * To create a scheduled plan, you MUST provide values for the following fields: + * `name` + * and + * `look_id`, `dashboard_id`, `lookml_dashboard_id`, or `query_id` + * and + * `cron_tab` or `datagroup` + * and + * at least one scheduled_plan_destination + * + * A scheduled plan MUST have at least one scheduled_plan_destination defined. + * + * When `look_id` is set, `require_no_results`, `require_results`, and `require_change` are all required. + * + * If `create_scheduled_plan` fails with a 422 error, be sure to look at the error messages in the response which will explain exactly what fields are missing or values that are incompatible. + * + * The queries that provide the data for the look or dashboard are run in the context of user account that owns the scheduled plan. + * + * When `run_as_recipient` is `false` or not specified, the queries that provide the data for the + * look or dashboard are run in the context of user account that owns the scheduled plan. + * + * When `run_as_recipient` is `true` and all the email recipients are Looker user accounts, the + * queries are run in the context of each recipient, so different recipients may see different + * data from the same scheduled render of a look or dashboard. For more details, see [Run As Recipient](https://looker.com/docs/r/admin/run-as-recipient). + * + * Admins can create and modify scheduled plans on behalf of other users by specifying a user id. + * Non-admin users may not create or modify scheduled plans by or for other users. + * + * #### Email Permissions: + * + * For details about permissions required to schedule delivery to email and the safeguards + * Looker offers to protect against sending to unauthorized email destinations, see [Email Domain Whitelist for Scheduled Looks](https://docs.looker.com/r/api/embed-permissions). + * + * + * #### Scheduled Plan Destination Formats + * + * Scheduled plan destinations must specify the data format to produce and send to the destination. + * + * Formats: + * + * | format | Description + * | :-----------: | :--- | + * | json | A JSON object containing a `data` property which contains an array of JSON objects, one per row. No metadata. 
+ * | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + * | inline_json | Same as the JSON format, except that the `data` property is a string containing JSON-escaped row data. Additional properties describe the data operation. This format is primarily used to send data to web hooks so that the web hook doesn't have to re-encode the JSON row data in order to pass it on to its ultimate destination. + * | csv | Comma separated values with a header + * | txt | Tab separated values with a header + * | html | Simple html + * | xlsx | MS Excel spreadsheet + * | wysiwyg_pdf | Dashboard rendered in a tiled layout to produce a PDF document + * | assembled_pdf | Dashboard rendered in a single column layout to produce a PDF document + * | wysiwyg_png | Dashboard rendered in a tiled layout to produce a PNG image + * || + * + * Valid formats vary by destination type and source object. `wysiwyg_pdf` is only valid for dashboards, for example. + * + * + * + */ + @Override + public void createScheduledPlan(CreateScheduledPlanRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/scheduled_plans", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateScheduledPlanResponse.Builder responseBuilder = CreateScheduledPlanResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Run a Scheduled Plan Immediately + * + * Create a scheduled plan that runs only once, and immediately. + * + * This can be useful for testing a Scheduled Plan before committing to a production schedule. + * + * Admins can create scheduled plans on behalf of other users by specifying a user id. + * + * This API is rate limited to prevent it from being used for relay spam or DoS attacks + * + * #### Email Permissions: + * + * For details about permissions required to schedule delivery to email and the safeguards + * Looker offers to protect against sending to unauthorized email destinations, see [Email Domain Whitelist for Scheduled Looks](https://docs.looker.com/r/api/embed-permissions). + * + * + * #### Scheduled Plan Destination Formats + * + * Scheduled plan destinations must specify the data format to produce and send to the destination. + * + * Formats: + * + * | format | Description + * | :-----------: | :--- | + * | json | A JSON object containing a `data` property which contains an array of JSON objects, one per row. No metadata. + * | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + * | inline_json | Same as the JSON format, except that the `data` property is a string containing JSON-escaped row data. Additional properties describe the data operation. 
This format is primarily used to send data to web hooks so that the web hook doesn't have to re-encode the JSON row data in order to pass it on to its ultimate destination. + * | csv | Comma separated values with a header + * | txt | Tab separated values with a header + * | html | Simple html + * | xlsx | MS Excel spreadsheet + * | wysiwyg_pdf | Dashboard rendered in a tiled layout to produce a PDF document + * | assembled_pdf | Dashboard rendered in a single column layout to produce a PDF document + * | wysiwyg_png | Dashboard rendered in a tiled layout to produce a PNG image + * || + * + * Valid formats vary by destination type and source object. `wysiwyg_pdf` is only valid for dashboards, for example. + * + * + * + */ + @Override + public void scheduledPlanRunOnce(ScheduledPlanRunOnceRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/scheduled_plans/run_once", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ScheduledPlanRunOnceResponse.Builder responseBuilder = ScheduledPlanRunOnceResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Scheduled Plans for a Look + * + * Returns all scheduled plans for a look which belong to the caller or given user. + * + * If no user_id is provided, this function returns the scheduled plans owned by the caller. + * + * + * To list all schedules for all users, pass `all_users=true`. + * + * + * The caller must have `see_schedules` permission to see other users' scheduled plans. 
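 + *
 + * Client-side this is a server-streaming call: the proxy emits one stream response per
 + * scheduled plan in the REST result array. A minimal consumption sketch, assuming a
 + * generated blocking stub (the `stub` and `handle` names below are illustrative, not
 + * part of the generated API):
 + *
 + *   stub.scheduledPlansForLook(request)
 + *       .forEachRemaining(resp -> handle(resp.getResult()));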
+ * + * + * + */ + @Override + public void scheduledPlansForLook(ScheduledPlansForLookRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/scheduled_plans/look/{look_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ScheduledPlansForLookResponse.Builder responseBuilder = ScheduledPlansForLookResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + ScheduledPlansForLookStreamResponse.Builder responseBuilder2 = ScheduledPlansForLookStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Scheduled Plans for a Dashboard + * + * Returns all scheduled plans for a dashboard which belong to the caller or given user. + * + * If no user_id is provided, this function returns the scheduled plans owned by the caller. + * + * + * To list all schedules for all users, pass `all_users=true`. + * + * + * The caller must have `see_schedules` permission to see other users' scheduled plans. + * + * + * + */ + @Override + public void scheduledPlansForDashboard(ScheduledPlansForDashboardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/scheduled_plans/dashboard/{dashboard_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ScheduledPlansForDashboardResponse.Builder responseBuilder = ScheduledPlansForDashboardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + ScheduledPlansForDashboardStreamResponse.Builder responseBuilder2 = ScheduledPlansForDashboardStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get Scheduled Plans for a LookML Dashboard + * + * Returns all scheduled plans for a LookML Dashboard which belong to the caller or given user. + * + * If no user_id is provided, this function returns the scheduled plans owned by the caller. + * + * + * To list all schedules for all users, pass `all_users=true`. + * + * + * The caller must have `see_schedules` permission to see other users' scheduled plans. 
+ * + * + * + */ + @Override + public void scheduledPlansForLookmlDashboard(ScheduledPlansForLookmlDashboardRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/scheduled_plans/lookml_dashboard/{lookml_dashboard_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ScheduledPlansForLookmlDashboardResponse.Builder responseBuilder = ScheduledPlansForLookmlDashboardResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + ScheduledPlansForLookmlDashboardStreamResponse.Builder responseBuilder2 = ScheduledPlansForLookmlDashboardStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Run a Scheduled Plan By Id Immediately + * This function creates a run-once schedule plan based on an existing scheduled plan, + * applies modifications (if any) to the new scheduled plan, and runs the new schedule plan immediately. + * This can be useful for testing modifications to an existing scheduled plan before committing to a production schedule. + * + * This function internally performs the following operations: + * + * 1. Copies the properties of the existing scheduled plan into a new scheduled plan + * 2. Copies any properties passed in the JSON body of this request into the new scheduled plan (replacing the original values) + * 3. Creates the new scheduled plan + * 4. Runs the new scheduled plan + * + * The original scheduled plan is not modified by this operation. + * Admins can create, modify, and run scheduled plans on behalf of other users by specifying a user id. + * Non-admins can only create, modify, and run their own scheduled plans. + * + * #### Email Permissions: + * + * For details about permissions required to schedule delivery to email and the safeguards + * Looker offers to protect against sending to unauthorized email destinations, see [Email Domain Whitelist for Scheduled Looks](https://docs.looker.com/r/api/embed-permissions). + * + * + * #### Scheduled Plan Destination Formats + * + * Scheduled plan destinations must specify the data format to produce and send to the destination. + * + * Formats: + * + * | format | Description + * | :-----------: | :--- | + * | json | A JSON object containing a `data` property which contains an array of JSON objects, one per row. No metadata. + * | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + * | inline_json | Same as the JSON format, except that the `data` property is a string containing JSON-escaped row data. Additional properties describe the data operation. This format is primarily used to send data to web hooks so that the web hook doesn't have to re-encode the JSON row data in order to pass it on to its ultimate destination. 
+ * | csv | Comma separated values with a header + * | txt | Tab separated values with a header + * | html | Simple html + * | xlsx | MS Excel spreadsheet + * | wysiwyg_pdf | Dashboard rendered in a tiled layout to produce a PDF document + * | assembled_pdf | Dashboard rendered in a single column layout to produce a PDF document + * | wysiwyg_png | Dashboard rendered in a tiled layout to produce a PNG image + * || + * + * Valid formats vary by destination type and source object. `wysiwyg_pdf` is only valid for dashboards, for example. + * + * + * + * This API is rate limited to prevent it from being used for relay spam or DoS attacks + * + * + */ + @Override + public void scheduledPlanRunOnceById(ScheduledPlanRunOnceByIdRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/scheduled_plans/{scheduled_plan_id}/run_once", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ScheduledPlanRunOnceByIdResponse.Builder responseBuilder = ScheduledPlanRunOnceByIdResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion ScheduledPlan: Manage Scheduled Plans + + //#region Session: Session Information + + /** + * ### Get API Session + * + * Returns information about the current API session, such as which workspace is selected for the session. + * + */ + @Override + public void session(SessionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/session", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SessionResponse.Builder responseBuilder = SessionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update API Session + * + * #### API Session Workspace + * + * You can use this endpoint to change the active workspace for the current API session. + * + * Only one workspace can be active in a session. The active workspace can be changed + * any number of times in a session. + * + * The default workspace for API sessions is the "production" workspace. + * + * All Looker APIs that use projects or lookml models (such as running queries) will + * use the version of project and model files defined by this workspace for the lifetime of the + * current API session or until the session workspace is changed again. 
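 + *
 + * For example, selecting the dev workspace for the current session amounts to the proxy
 + * issuing a PATCH of `/session` with a JSON body along the lines of the sketch below
 + * (field name taken from the API's session body; shown only as an illustration):
 + *
 + *   { "workspace_id": "dev" }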
+ * + * An API session has the same lifetime as the access_token used to authenticate API requests. Each successful + * API login generates a new access_token and a new API session. + * + * If your Looker API client application needs to work in a dev workspace across multiple + * API sessions, be sure to select the dev workspace after each login. + * + */ + @Override + public void updateSession(UpdateSessionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/session", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateSessionResponse.Builder responseBuilder = UpdateSessionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Session: Session Information + + //#region Theme: Manage Themes + + /** + * ### Get an array of all existing themes + * + * Get a **single theme** by id with [Theme](#!/Theme/theme) + * + * This method returns an array of all existing themes. The active time for the theme is not considered. + * + * **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + * + * + */ + @Override + public void allThemes(AllThemesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/themes", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllThemesResponse.Builder responseBuilder = AllThemesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllThemesStreamResponse.Builder responseBuilder2 = AllThemesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a theme + * + * Creates a new theme object, returning the theme details, including the created id. + * + * If `settings` are not specified, the default theme settings will be copied into the new theme. + * + * The theme `name` can only contain alphanumeric characters or underscores. Theme names should not contain any confidential information, such as customer names. 
+ * + * **Update** an existing theme with [Update Theme](#!/Theme/update_theme) + * + * **Permanently delete** an existing theme with [Delete Theme](#!/Theme/delete_theme) + * + * For more information, see [Creating and Applying Themes](https://looker.com/docs/r/admin/themes). + * + * **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + * + * + */ + @Override + public void createTheme(CreateThemeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/themes", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateThemeResponse.Builder responseBuilder = CreateThemeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search all themes for matching criteria. + * + * Returns an **array of theme objects** that match the specified search criteria. + * + * | Search Parameters | Description + * | :-------------------: | :------ | + * | `begin_at` only | Find themes active at or after `begin_at` + * | `end_at` only | Find themes active at or before `end_at` + * | both set | Find themes with an active inclusive period between `begin_at` and `end_at` + * + * Note: Range matching requires boolean AND logic. + * When using `begin_at` and `end_at` together, do not use `filter_or`=TRUE + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. + * + * + * Get a **single theme** by id with [Theme](#!/Theme/theme) + * + * **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. 
+ * + * + */ + @Override + public void searchThemes(SearchThemesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/themes/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchThemesResponse.Builder responseBuilder = SearchThemesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SearchThemesStreamResponse.Builder responseBuilder2 = SearchThemesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the default theme + * + * Returns the active theme object set as the default. + * + * The **default** theme name can be set in the UI on the Admin|Theme UI page + * + * The optional `ts` parameter can specify a different timestamp than "now." If specified, it returns the default theme at the time indicated. + * + */ + @Override + public void defaultTheme(DefaultThemeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/themes/default", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DefaultThemeResponse.Builder responseBuilder = DefaultThemeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Set the global default theme by theme name + * + * Only Admin users can call this function. + * + * Only an active theme with no expiration (`end_at` not set) can be assigned as the default theme. As long as a theme has an active record with no expiration, it can be set as the default. + * + * [Create Theme](#!/Theme/create) has detailed information on rules for default and active themes + * + * Returns the new specified default theme object. + * + * **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. 
+ * + * + */ + @Override + public void setDefaultTheme(SetDefaultThemeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/themes/default", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SetDefaultThemeResponse.Builder responseBuilder = SetDefaultThemeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get active themes + * + * Returns an array of active themes. + * + * If the `name` parameter is specified, it will return an array with one theme if it's active and found. + * + * The optional `ts` parameter can specify a different timestamp than "now." + * + * **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + * + * + * + */ + @Override + public void activeThemes(ActiveThemesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/themes/active", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ActiveThemesResponse.Builder responseBuilder = ActiveThemesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + ActiveThemesStreamResponse.Builder responseBuilder2 = ActiveThemesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get the named theme if it's active. Otherwise, return the default theme + * + * The optional `ts` parameter can specify a different timestamp than "now." + * Note: API users with `show` ability can call this function + * + * **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. 
+ * + * + */ + @Override + public void themeOrDefault(ThemeOrDefaultRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/themes/theme_or_default", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ThemeOrDefaultResponse.Builder responseBuilder = ThemeOrDefaultResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Validate a theme with the specified information + * + * Validates all values set for the theme, returning any errors encountered, or 200 OK if valid + * + * See [Create Theme](#!/Theme/create_theme) for constraints + * + * **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + * + * + */ + @Override + public void validateTheme(ValidateThemeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/themes/validate", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ValidateThemeResponse.Builder responseBuilder = ValidateThemeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get a theme by ID + * + * Use this to retrieve a specific theme, whether or not it's currently active. + * + * **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. 
+ * + * + */ + @Override + public void theme(ThemeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/themes/{theme_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + ThemeResponse.Builder responseBuilder = ThemeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update the theme by id. + * + * **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + * + * + */ + @Override + public void updateTheme(UpdateThemeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/themes/{theme_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateThemeResponse.Builder responseBuilder = UpdateThemeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a specific theme by id + * + * This operation permanently deletes the identified theme from the database. + * + * Because multiple themes can have the same name (with different activation time spans) themes can only be deleted by ID. + * + * All IDs associated with a theme name can be retrieved by searching for the theme name with [Theme Search](#!/Theme/search). + * + * **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. 
+ * + * + */ + @Override + public void deleteTheme(DeleteThemeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/themes/{theme_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteThemeResponse.Builder responseBuilder = DeleteThemeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Theme: Manage Themes + + //#region User: Manage Users + + /** + * ### Get information about the current user; i.e. the user account currently calling the API. + * + */ + @Override + public void me(MeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/user", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + MeResponse.Builder responseBuilder = MeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about all users. + * + */ + @Override + public void allUsers(AllUsersRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllUsersResponse.Builder responseBuilder = AllUsersResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllUsersStreamResponse.Builder responseBuilder2 = AllUsersStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a user with the specified information. 
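 + *
 + * A minimal request sketch, assuming the generated protobuf classes mirror the API's
 + * snake_case fields (e.g. `first_name` -> `setFirstName`); the `WriteUser` message name
 + * is an assumption about the generated types, not a confirmed part of this API:
 + *
 + *   CreateUserRequest req = CreateUserRequest.newBuilder()
 + *       .setBody(WriteUser.newBuilder()
 + *           .setFirstName("Ada")
 + *           .setLastName("Lovelace"))
 + *       .build();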
+ * + */ + @Override + public void createUser(CreateUserRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/users", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateUserResponse.Builder responseBuilder = CreateUserResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search users + * + * Returns all* user records that match the given search criteria. + * + * If multiple search params are given and `filter_or` is FALSE or not specified, + * search params are combined in a logical AND operation. + * Only rows that match *all* search param criteria will be returned. + * + * If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + * Results will include rows that match **any** of the search criteria. + * + * String search params use case-insensitive matching. + * String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + * example="dan%" will match "danger" and "Danzig" but not "David" + * example="D_m%" will match "Damage" and "dump" + * + * Integer search params can accept a single value or a comma separated list of values. The multiple + * values will be combined under a logical OR operation - results will match at least one of + * the given values. + * + * Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + * or exclude (respectively) rows where the column is null. + * + * Boolean search params accept only "true" and "false" as values. + * + * + * (*) Results are always filtered to the level of information the caller is permitted to view. + * Looker admins can see all user details; normal users in an open system can see + * names of other users but no details; normal users in a closed system can only see + * names of other users who are members of the same group as the user. 
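 + *
 + * A request sketch tying the wildcard rules above to the generated builder (setter names
 + * are assumptions based on the API's snake_case search parameters):
 + *
 + *   SearchUsersRequest req = SearchUsersRequest.newBuilder()
 + *       .setFirstName("dan%")   // case-insensitive SQL LIKE-style pattern
 + *       .setLastName("D_m%")    // combined with the other params via logical AND
 + *       .build();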
+ * + * + */ + @Override + public void searchUsers(SearchUsersRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/search", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchUsersResponse.Builder responseBuilder = SearchUsersResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SearchUsersStreamResponse.Builder responseBuilder2 = SearchUsersStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Search for user accounts by name + * + * Returns all user accounts where `first_name` OR `last_name` OR `email` field values match a pattern. + * The pattern can contain `%` and `_` wildcards as in SQL LIKE expressions. + * + * Any additional search params will be combined into a logical AND expression. + * + */ + @Override + public void searchUsersNames(SearchUsersNamesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/search/names/{pattern}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SearchUsersNamesResponse.Builder responseBuilder = SearchUsersNamesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SearchUsersNamesStreamResponse.Builder responseBuilder2 = SearchUsersNamesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the user with a specific id. + * + * If the caller is an admin or the caller is the user being specified, then full user information will + * be returned. Otherwise, a minimal 'public' variant of the user information will be returned. This contains + * The user name and avatar url, but no sensitive information. 
+ * + */ + @Override + public void user(UserRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserResponse.Builder responseBuilder = UserResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update information about the user with a specific id. + * + */ + @Override + public void updateUser(UpdateUserRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/users/{user_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateUserResponse.Builder responseBuilder = UpdateUserResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete the user with a specific id. + * + * **DANGER** this will delete the user and all looks and other information owned by the user. + * + */ + @Override + public void deleteUser(DeleteUserRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserResponse.Builder responseBuilder = DeleteUserResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about the user with a credential of given type with specific id. + * + * This is used to do things like find users by their embed external_user_id. Or, find the user with + * a given api3 client_id, etc. The 'credential_type' matchs the 'type' name of the various credential + * types. It must be one of the values listed in the table below. 
The 'credential_id' is your unique Id + * for the user and is specific to each type of credential. + * + * An example using the Ruby sdk might look like: + * + * `sdk.user_for_credential('embed', 'customer-4959425')` + * + * This table shows the supported 'Credential Type' strings. The right column is for reference; it shows + * which field in the given credential type is actually searched when finding a user with the supplied + * 'credential_id'. + * + * | Credential Types | Id Field Matched | + * | ---------------- | ---------------- | + * | email | email | + * | google | google_user_id | + * | saml | saml_user_id | + * | oidc | oidc_user_id | + * | ldap | ldap_id | + * | api | token | + * | api3 | client_id | + * | embed | external_user_id | + * | looker_openid | email | + * + * NOTE: The 'api' credential type was only used with the legacy Looker query API and is no longer supported. The credential type for API you are currently looking at is 'api3'. + * + * + */ + @Override + public void userForCredential(UserForCredentialRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/credential/{credential_type}/{credential_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserForCredentialResponse.Builder responseBuilder = UserForCredentialResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Email/password login information for the specified user. + */ + @Override + public void userCredentialsEmail(UserCredentialsEmailRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_email", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserCredentialsEmailResponse.Builder responseBuilder = UserCredentialsEmailResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Email/password login information for the specified user. 
+ */ + @Override + public void createUserCredentialsEmail(CreateUserCredentialsEmailRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/users/{user_id}/credentials_email", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateUserCredentialsEmailResponse.Builder responseBuilder = CreateUserCredentialsEmailResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Email/password login information for the specified user. + */ + @Override + public void updateUserCredentialsEmail(UpdateUserCredentialsEmailRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/users/{user_id}/credentials_email", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateUserCredentialsEmailResponse.Builder responseBuilder = UpdateUserCredentialsEmailResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Email/password login information for the specified user. + */ + @Override + public void deleteUserCredentialsEmail(DeleteUserCredentialsEmailRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/credentials_email", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserCredentialsEmailResponse.Builder responseBuilder = DeleteUserCredentialsEmailResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Two-factor login information for the specified user. 
+ */ + @Override + public void userCredentialsTotp(UserCredentialsTotpRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_totp", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserCredentialsTotpResponse.Builder responseBuilder = UserCredentialsTotpResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Two-factor login information for the specified user. + */ + @Override + public void createUserCredentialsTotp(CreateUserCredentialsTotpRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/users/{user_id}/credentials_totp", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateUserCredentialsTotpResponse.Builder responseBuilder = CreateUserCredentialsTotpResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Two-factor login information for the specified user. + */ + @Override + public void deleteUserCredentialsTotp(DeleteUserCredentialsTotpRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/credentials_totp", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserCredentialsTotpResponse.Builder responseBuilder = DeleteUserCredentialsTotpResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### LDAP login information for the specified user. 
+ */ + @Override + public void userCredentialsLdap(UserCredentialsLdapRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_ldap", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserCredentialsLdapResponse.Builder responseBuilder = UserCredentialsLdapResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### LDAP login information for the specified user. + */ + @Override + public void deleteUserCredentialsLdap(DeleteUserCredentialsLdapRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/credentials_ldap", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserCredentialsLdapResponse.Builder responseBuilder = DeleteUserCredentialsLdapResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Google authentication login information for the specified user. + */ + @Override + public void userCredentialsGoogle(UserCredentialsGoogleRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_google", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserCredentialsGoogleResponse.Builder responseBuilder = UserCredentialsGoogleResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Google authentication login information for the specified user. 
+ */ + @Override + public void deleteUserCredentialsGoogle(DeleteUserCredentialsGoogleRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/credentials_google", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserCredentialsGoogleResponse.Builder responseBuilder = DeleteUserCredentialsGoogleResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Saml authentication login information for the specified user. + */ + @Override + public void userCredentialsSaml(UserCredentialsSamlRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_saml", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserCredentialsSamlResponse.Builder responseBuilder = UserCredentialsSamlResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Saml authentication login information for the specified user. + */ + @Override + public void deleteUserCredentialsSaml(DeleteUserCredentialsSamlRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/credentials_saml", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserCredentialsSamlResponse.Builder responseBuilder = DeleteUserCredentialsSamlResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### OpenID Connect (OIDC) authentication login information for the specified user. 
+ */ + @Override + public void userCredentialsOidc(UserCredentialsOidcRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_oidc", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserCredentialsOidcResponse.Builder responseBuilder = UserCredentialsOidcResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### OpenID Connect (OIDC) authentication login information for the specified user. + */ + @Override + public void deleteUserCredentialsOidc(DeleteUserCredentialsOidcRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/credentials_oidc", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserCredentialsOidcResponse.Builder responseBuilder = DeleteUserCredentialsOidcResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### API 3 login information for the specified user. This is for the newer API keys that can be added for any user. + */ + @Override + public void userCredentialsApi3(UserCredentialsApi3Request request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_api3/{credentials_api3_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserCredentialsApi3Response.Builder responseBuilder = UserCredentialsApi3Response.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### API 3 login information for the specified user. This is for the newer API keys that can be added for any user. 
+ */ + @Override + public void deleteUserCredentialsApi3(DeleteUserCredentialsApi3Request request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/credentials_api3/{credentials_api3_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserCredentialsApi3Response.Builder responseBuilder = DeleteUserCredentialsApi3Response.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### API 3 login information for the specified user. This is for the newer API keys that can be added for any user. + */ + @Override + public void allUserCredentialsApi3s(AllUserCredentialsApi3sRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_api3", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllUserCredentialsApi3sResponse.Builder responseBuilder = AllUserCredentialsApi3sResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllUserCredentialsApi3sStreamResponse.Builder responseBuilder2 = AllUserCredentialsApi3sStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### API 3 login information for the specified user. This is for the newer API keys that can be added for any user. 
+ */ + @Override + public void createUserCredentialsApi3(CreateUserCredentialsApi3Request request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/users/{user_id}/credentials_api3", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateUserCredentialsApi3Response.Builder responseBuilder = CreateUserCredentialsApi3Response.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Embed login information for the specified user. + */ + @Override + public void userCredentialsEmbed(UserCredentialsEmbedRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_embed/{credentials_embed_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserCredentialsEmbedResponse.Builder responseBuilder = UserCredentialsEmbedResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Embed login information for the specified user. + */ + @Override + public void deleteUserCredentialsEmbed(DeleteUserCredentialsEmbedRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/credentials_embed/{credentials_embed_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserCredentialsEmbedResponse.Builder responseBuilder = DeleteUserCredentialsEmbedResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Embed login information for the specified user. 
+ */ + @Override + public void allUserCredentialsEmbeds(AllUserCredentialsEmbedsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_embed", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllUserCredentialsEmbedsResponse.Builder responseBuilder = AllUserCredentialsEmbedsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllUserCredentialsEmbedsStreamResponse.Builder responseBuilder2 = AllUserCredentialsEmbedsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Looker Openid login information for the specified user. Used by Looker Analysts. + */ + @Override + public void userCredentialsLookerOpenid(UserCredentialsLookerOpenidRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/credentials_looker_openid", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserCredentialsLookerOpenidResponse.Builder responseBuilder = UserCredentialsLookerOpenidResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Looker Openid login information for the specified user. Used by Looker Analysts. 
+ */ + @Override + public void deleteUserCredentialsLookerOpenid(DeleteUserCredentialsLookerOpenidRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/credentials_looker_openid", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserCredentialsLookerOpenidResponse.Builder responseBuilder = DeleteUserCredentialsLookerOpenidResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Web login session for the specified user. + */ + @Override + public void userSession(UserSessionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/sessions/{session_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserSessionResponse.Builder responseBuilder = UserSessionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Web login session for the specified user. + */ + @Override + public void deleteUserSession(DeleteUserSessionRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/sessions/{session_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserSessionResponse.Builder responseBuilder = DeleteUserSessionResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Web login session for the specified user. 
+ */ + @Override + public void allUserSessions(AllUserSessionsRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/sessions", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllUserSessionsResponse.Builder responseBuilder = AllUserSessionsResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllUserSessionsStreamResponse.Builder responseBuilder2 = AllUserSessionsStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a password reset token. + * This will create a cryptographically secure random password reset token for the user. + * If the user already has a password reset token then this invalidates the old token and creates a new one. + * The token is expressed as the 'password_reset_url' of the user's email/password credential object. + * This takes an optional 'expires' param to indicate if the new token should be an expiring token. + * Tokens that expire are typically used for self-service password resets for existing users. + * Invitation emails for new users typically are not set to expire. + * The expire period is always 60 minutes when expires is enabled. + * This method can be called with an empty body. 
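+   *
+   * Illustrative client-side sketch (not generated code). It assumes the standard grpc-java
+   * stubs produced from methods.proto (e.g. `LookerServiceGrpc.newBlockingStub(channel)`) and a
+   * `user_id` field on the request; verify both against the generated sources.
+   *
+   *   CreateUserCredentialsEmailPasswordResetResponse reset = stub.createUserCredentialsEmailPasswordReset(
+   *       CreateUserCredentialsEmailPasswordResetRequest.newBuilder()
+   *           .setUserId(123L)   // assumed field name, taken from the {user_id} path parameter
+   *           .build());         // an empty body is allowed, per the note above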
+ * + */ + @Override + public void createUserCredentialsEmailPasswordReset(CreateUserCredentialsEmailPasswordResetRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/users/{user_id}/credentials_email/password_reset", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateUserCredentialsEmailPasswordResetResponse.Builder responseBuilder = CreateUserCredentialsEmailPasswordResetResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about roles of a given user + * + */ + @Override + public void userRoles(UserRolesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/roles", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserRolesResponse.Builder responseBuilder = UserRolesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + UserRolesStreamResponse.Builder responseBuilder2 = UserRolesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Set roles of the user with a specific id. 
+ * + */ + @Override + public void setUserRoles(SetUserRolesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.put("/users/{user_id}/roles", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SetUserRolesResponse.Builder responseBuilder = SetUserRolesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SetUserRolesStreamResponse.Builder responseBuilder2 = SetUserRolesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get user attribute values for a given user. + * + * Returns the values of specified user attributes (or all user attributes) for a certain user. + * + * A value for each user attribute is searched for in the following locations, in this order: + * + * 1. in the user's account information + * 1. in groups that the user is a member of + * 1. the default value of the user attribute + * + * If more than one group has a value defined for a user attribute, the group with the lowest rank wins. + * + * The response will only include user attributes for which values were found. Use `include_unset=true` to include + * empty records for user attributes with no value. + * + * The value of all hidden user attributes will be blank. + * + */ + @Override + public void userAttributeUserValues(UserAttributeUserValuesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/users/{user_id}/attribute_values", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserAttributeUserValuesResponse.Builder responseBuilder = UserAttributeUserValuesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + UserAttributeUserValuesStreamResponse.Builder responseBuilder2 = UserAttributeUserValuesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Store a custom value for a user attribute in a user's account settings. + * + * Per-user user attribute values take precedence over group or default values. 
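+   *
+   * Illustrative client-side sketch (not generated code); the setter names are assumptions
+   * derived from the {user_id} and {user_attribute_id} path parameters, and the new value
+   * itself travels in the request's body message (see models.proto for the exact shape):
+   *
+   *   SetUserAttributeUserValueResponse updated = stub.setUserAttributeUserValue(
+   *       SetUserAttributeUserValueRequest.newBuilder()
+   *           .setUserId(123L)
+   *           .setUserAttributeId(45L)
+   *           .build());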
+ * + */ + @Override + public void setUserAttributeUserValue(SetUserAttributeUserValueRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/users/{user_id}/attribute_values/{user_attribute_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SetUserAttributeUserValueResponse.Builder responseBuilder = SetUserAttributeUserValueResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a user attribute value from a user's account settings. + * + * After the user attribute value is deleted from the user's account settings, subsequent requests + * for the user attribute value for this user will draw from the user's groups or the default + * value of the user attribute. See [Get User Attribute Values](#!/User/user_attribute_user_values) for more + * information about how user attribute values are resolved. + * + */ + @Override + public void deleteUserAttributeUserValue(DeleteUserAttributeUserValueRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/users/{user_id}/attribute_values/{user_attribute_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserAttributeUserValueResponse.Builder responseBuilder = DeleteUserAttributeUserValueResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Send a password reset token. + * This will send a password reset email to the user. If a password reset token does not already exist + * for this user, it will create one and then send it. + * If the user has not yet set up their account, it will send a setup email to the user. + * The URL sent in the email is expressed as the 'password_reset_url' of the user's email/password credential object. + * Password reset URLs will expire in 60 minutes. + * This method can be called with an empty body. 
+ * + */ + @Override + public void sendUserCredentialsEmailPasswordReset(SendUserCredentialsEmailPasswordResetRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/users/{user_id}/credentials_email/send_password_reset", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SendUserCredentialsEmailPasswordResetResponse.Builder responseBuilder = SendUserCredentialsEmailPasswordResetResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion User: Manage Users + + //#region UserAttribute: Manage User Attributes + + /** + * ### Get information about all user attributes. + * + */ + @Override + public void allUserAttributes(AllUserAttributesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/user_attributes", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllUserAttributesResponse.Builder responseBuilder = AllUserAttributesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllUserAttributesStreamResponse.Builder responseBuilder2 = AllUserAttributesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Create a new user attribute + * + * Permission information for a user attribute is conveyed through the `can` and `user_can_edit` fields. + * The `user_can_edit` field indicates whether an attribute is user-editable _anywhere_ in the application. + * The `can` field gives more granular access information, with the `set_value` child field indicating whether + * an attribute's value can be set by [Setting the User Attribute User Value](#!/User/set_user_attribute_user_value). + * + * Note: `name` and `label` fields must be unique across all user attributes in the Looker instance. + * Attempting to create a new user attribute with a name or label that duplicates an existing + * user attribute will fail with a 422 error. 
+ * + */ + @Override + public void createUserAttribute(CreateUserAttributeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/user_attributes", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + CreateUserAttributeResponse.Builder responseBuilder = CreateUserAttributeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get information about a user attribute. + * + */ + @Override + public void userAttribute(UserAttributeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/user_attributes/{user_attribute_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UserAttributeResponse.Builder responseBuilder = UserAttributeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Update a user attribute definition. + * + */ + @Override + public void updateUserAttribute(UpdateUserAttributeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.patch("/user_attributes/{user_attribute_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + UpdateUserAttributeResponse.Builder responseBuilder = UpdateUserAttributeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Delete a user attribute (admin only). 
+ * + */ + @Override + public void deleteUserAttribute(DeleteUserAttributeRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.delete("/user_attributes/{user_attribute_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + DeleteUserAttributeResponse.Builder responseBuilder = DeleteUserAttributeResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Returns all values of a user attribute defined by user groups, in precedence order. + * + * A user may be a member of multiple groups which define different values for a given user attribute. + * The order of group-values in the response determines precedence for selecting which group-value applies + * to a given user. For more information, see [Set User Attribute Group Values](#!/UserAttribute/set_user_attribute_group_values). + * + * Results will only include groups that the caller's user account has permission to see. + * + */ + @Override + public void allUserAttributeGroupValues(AllUserAttributeGroupValuesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/user_attributes/{user_attribute_id}/group_values", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllUserAttributeGroupValuesResponse.Builder responseBuilder = AllUserAttributeGroupValuesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllUserAttributeGroupValuesStreamResponse.Builder responseBuilder2 = AllUserAttributeGroupValuesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Define values for a user attribute across a set of groups, in priority order. + * + * This function defines all values for a user attribute defined by user groups. This is a global setting, potentially affecting + * all users in the system. This function replaces any existing group value definitions for the indicated user attribute. + * + * The value of a user attribute for a given user is determined by searching the following locations, in this order: + * + * 1. the user's account settings + * 2. the groups that the user is a member of + * 3. 
the default value of the user attribute, if any + * + * The user may be a member of multiple groups which define different values for that user attribute. The order of items in the group_values parameter + * determines which group takes priority for that user. Lowest array index wins. + * + * An alternate method to indicate the selection precedence of group-values is to assign numbers to the 'rank' property of each + * group-value object in the array. Lowest 'rank' value wins. If you use this technique, you must assign a + * rank value to every group-value object in the array. + * + * To set a user attribute value for a single user, see [Set User Attribute User Value](#!/User/set_user_attribute_user_value). + * To set a user attribute value for all members of a group, see [Set User Attribute Group Value](#!/Group/update_user_attribute_group_value). + * + */ + @Override + public void setUserAttributeGroupValues(SetUserAttributeGroupValuesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.post("/user_attributes/{user_attribute_id}/group_values", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + SetUserAttributeGroupValuesResponse.Builder responseBuilder = SetUserAttributeGroupValuesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + SetUserAttributeGroupValuesStreamResponse.Builder responseBuilder2 = SetUserAttributeGroupValuesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion UserAttribute: Manage User Attributes + + //#region Workspace: Manage Workspaces + + /** + * ### Get All Workspaces + * + * Returns all workspaces available to the calling user. 
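+   *
+   * Illustrative client-side sketch (not generated code). Because the proxy fans the REST
+   * result array out as one AllWorkspacesStreamResponse message per workspace, the blocking
+   * stub (assuming standard grpc-java codegen, class name `LookerServiceGrpc`) returns an iterator:
+   *
+   *   Iterator<AllWorkspacesStreamResponse> workspaces =
+   *       stub.allWorkspaces(AllWorkspacesRequest.newBuilder().build());
+   *   while (workspaces.hasNext()) {
+   *     System.out.println(workspaces.next().getResult());   // one workspace entry per message
+   *   }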
+ * + */ + @Override + public void allWorkspaces(AllWorkspacesRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/workspaces", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + AllWorkspacesResponse.Builder responseBuilder = AllWorkspacesResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseBuilder.getResultList().forEach(entry -> { + AllWorkspacesStreamResponse.Builder responseBuilder2 = AllWorkspacesStreamResponse.newBuilder(); + responseBuilder2.setResult(entry); + responseObserver.onNext(responseBuilder2.build()); + }); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + /** + * ### Get A Workspace + * + * Returns information about a workspace such as the git status and selected branches + * of all projects available to the caller's user account. + * + * A workspace defines which versions of project files will be used to evaluate expressions + * and operations that use model definitions - operations such as running queries or rendering dashboards. + * Each project has its own git repository, and each project in a workspace may be configured to reference + * particular branch or revision within their respective repositories. + * + * There are two predefined workspaces available: "production" and "dev". + * + * The production workspace is shared across all Looker users. Models in the production workspace are read-only. + * Changing files in production is accomplished by modifying files in a git branch and using Pull Requests + * to merge the changes from the dev branch into the production branch, and then telling + * Looker to sync with production. + * + * The dev workspace is local to each Looker user. Changes made to project/model files in the dev workspace only affect + * that user, and only when the dev workspace is selected as the active workspace for the API session. + * (See set_session_workspace()). + * + * The dev workspace is NOT unique to an API session. Two applications accessing the Looker API using + * the same user account will see the same files in the dev workspace. To avoid collisions between + * API clients it's best to have each client login with API3 credentials for a different user account. + * + * Changes made to files in a dev workspace are persistent across API sessions. It's a good + * idea to commit any changes you've made to the git repository, but not strictly required. Your modified files + * reside in a special user-specific directory on the Looker server and will still be there when you login in again + * later and use update_session(workspace_id: "dev") to select the dev workspace for the new API session. 
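+   *
+   * Illustrative client-side sketch (not generated code; the setter name is an assumption
+   * based on the {workspace_id} path parameter):
+   *
+   *   WorkspaceResponse dev = stub.workspace(
+   *       WorkspaceRequest.newBuilder().setWorkspaceId("dev").build());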
+ * + */ + @Override + public void workspace(WorkspaceRequest request, StreamObserver responseObserver) { + try { + String inputJson = JsonFormat + .printer() + .preservingProtoFieldNames() + .print(request); + LookerClientResponse lookerResponse = lookerClient.get("/workspaces/{workspace_id}", inputJson); + Status lookerStatus = lookerResponse.getStatus(); + if (lookerStatus != null) { + responseObserver.onError(lookerStatus.asRuntimeException()); + } else { + WorkspaceResponse.Builder responseBuilder = WorkspaceResponse.newBuilder(); + String outputJson = lookerResponse.getJsonResponse(); + if (outputJson != null) { + JsonFormat + .parser() + .ignoringUnknownFields() + .merge(outputJson, responseBuilder); + } + responseObserver.onNext(responseBuilder.build()); + responseObserver.onCompleted(); + } + } catch (InvalidProtocolBufferException e) { + LOGGER.error("invalid protobuf data", e); + responseObserver.onError(Status.INVALID_ARGUMENT.asRuntimeException()); + } + } + + + //#endregion Workspace: Manage Workspaces +} \ No newline at end of file diff --git a/proto/grpc_proxy/src/main/proto/ping/ping.proto b/proto/grpc_proxy/src/main/proto/ping/ping.proto new file mode 100644 index 000000000..e3f3bfcc5 --- /dev/null +++ b/proto/grpc_proxy/src/main/proto/ping/ping.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package looker; + +option java_package = "com.google.looker.grpc.services"; +option java_multiple_files = true; + +import 'ping/ping_model.proto'; + +service PingService { + rpc Ping(PingRequest) returns (PingResponse) {}; +} diff --git a/proto/grpc_proxy/src/main/proto/ping/ping_model.proto b/proto/grpc_proxy/src/main/proto/ping/ping_model.proto new file mode 100644 index 000000000..f5fdc5b77 --- /dev/null +++ b/proto/grpc_proxy/src/main/proto/ping/ping_model.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package looker; + +option java_package = "com.google.looker.server.rtl"; +option java_multiple_files = true; + +message PingRequest {} + +message PingResponse { + bool active = 1; +} diff --git a/proto/grpc_proxy/src/main/proto/sdk/methods.proto b/proto/grpc_proxy/src/main/proto/sdk/methods.proto new file mode 100644 index 000000000..ab0c67ddb --- /dev/null +++ b/proto/grpc_proxy/src/main/proto/sdk/methods.proto @@ -0,0 +1,3347 @@ +// MIT License +// +// Copyright (c) 2019 Looker Data Sciences, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
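+
+// Illustrative client usage sketch (not part of the generated service definition). It assumes
+// the standard grpc-java stubs that protoc produces for the LookerService declared below,
+// under the java_package option set in this file; class, method, and setter names should be
+// verified against the generated sources. TLS and credential handling are omitted.
+//
+//   ManagedChannel channel = ManagedChannelBuilder.forAddress("localhost", 50051).usePlaintext().build();
+//   LookerServiceGrpc.LookerServiceBlockingStub stub = LookerServiceGrpc.newBlockingStub(channel);
+//   LoginResponse login = stub.login(
+//       LoginRequest.newBuilder()
+//           .setClientId(clientId)           // API3 key, as described for the Login rpc below
+//           .setClientSecret(clientSecret)
+//           .build());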
+ +// 401 API methods + + +syntax = "proto3"; + +package looker; + +option java_package = "com.google.looker.grpc.services"; +option java_multiple_files = true; + +import 'sdk/models.proto'; + +service LookerService { + + // ApiAuth: API Authentication + + // ### Present client credentials to obtain an authorization token + // + // Looker API implements the OAuth2 [Resource Owner Password Credentials Grant](https://looker.com/docs/r/api/outh2_resource_owner_pc) pattern. + // The client credentials required for this login must be obtained by creating an API3 key on a user account + // in the Looker Admin console. The API3 key consists of a public `client_id` and a private `client_secret`. + // + // The access token returned by `login` must be used in the HTTP Authorization header of subsequent + // API requests, like this: + // ``` + // Authorization: token 4QDkCyCtZzYgj4C2p2cj3csJH7zqS5RzKs2kTnG4 + // ``` + // Replace "4QDkCy..." with the `access_token` value returned by `login`. + // The word `token` is a string literal and must be included exactly as shown. + // + // This function can accept `client_id` and `client_secret` parameters as URL query params or as www-form-urlencoded params in the body of the HTTP request. Since there is a small risk that URL parameters may be visible to intermediate nodes on the network route (proxies, routers, etc), passing credentials in the body of the request is considered more secure than URL params. + // + // Example of passing credentials in the HTTP request body: + // ```` + // POST HTTP /login + // Content-Type: application/x-www-form-urlencoded + // + // client_id=CGc9B7v7J48dQSJvxxx&client_secret=nNVS9cSS3xNpSC9JdsBvvvvv + // ```` + // + // ### Best Practice: + // Always pass credentials in body params. Pass credentials in URL query params **only** when you cannot pass body params due to application, tool, or other limitations. + // + // For more information and detailed examples of Looker API authorization, see [How to Authenticate to Looker API3](https://github.com/looker/looker-sdk-ruby/blob/master/authentication.md). + // + rpc Login(LoginRequest) returns (LoginResponse); + + // ### Create an access token that runs as a given user. + // + // This can only be called by an authenticated admin user. It allows that admin to generate a new + // authentication token for the user with the given user id. That token can then be used for subsequent + // API calls - which are then performed *as* that target user. + // + // The target user does *not* need to have a pre-existing API client_id/client_secret pair. And, no such + // credentials are created by this call. + // + // This allows for building systems where api user authentication for an arbitrary number of users is done + // outside of Looker and funneled through a single 'service account' with admin permissions. Note that a + // new access token is generated on each call. If target users are going to be making numerous API + // calls in a short period then it is wise to cache this authentication token rather than call this before + // each of those API calls. + // + // See 'login' for more detail on the access token and how to use it. + // + rpc LoginUser(LoginUserRequest) returns (LoginUserResponse); + + // ### Logout of the API and invalidate the current access token. 
+  //
+  rpc Logout(LogoutRequest) returns (LogoutResponse);
+
+
+
+  // Auth: Manage User Authentication Configuration
+
+  // ### Create SSO Embed URL
+  //
+  // Creates an SSO embed URL and cryptographically signs it with an embed secret.
+  // This signed URL can then be used to instantiate a Looker embed session in a PBL web application.
+  // Do not make any modifications to this URL - any change may invalidate the signature and
+  // cause the URL to fail to load a Looker embed session.
+  //
+  // A signed SSO embed URL can only be used once. After it has been used to request a page from the
+  // Looker server, the URL is invalid. Future requests using the same URL will fail. This is to prevent
+  // 'replay attacks'.
+  //
+  // The `target_url` property must be a complete URL of a Looker UI page - scheme, hostname, path and query params.
+  // To load a dashboard with id 56 and with a filter of `Date=1 years`, the looker URL would look like `https://myname.looker.com/dashboards/56?Date=1%20years`.
+  // The best way to obtain this target_url is to navigate to the desired Looker page in your web browser,
+  // copy the URL shown in the browser address bar and paste it into the `target_url` property as a quoted string value in this API request.
+  //
+  // Permissions for the embed user are defined by the groups in which the embed user is a member (group_ids property)
+  // and the lists of models and permissions assigned to the embed user.
+  // At a minimum, you must provide values for either the group_ids property, or both the models and permissions properties.
+  // These properties are additive; an embed user can be a member of certain groups AND be granted access to models and permissions.
+  //
+  // The embed user's access is the union of permissions granted by the group_ids, models, and permissions properties.
+  //
+  // This function does not strictly require all group_ids, user attribute names, or model names to exist at the moment the
+  // SSO embed url is created. Unknown group_id, user attribute names or model names will be passed through to the output URL.
+  // To diagnose potential problems with an SSO embed URL, you can copy the signed URL into the Embed URI Validator text box in `/admin/embed`.
+  //
+  // The `secret_id` parameter is optional. If specified, its value must be the id of an active secret defined in the Looker instance.
+  // If not specified, the URL will be signed using the newest active secret defined in the Looker instance.
+  //
+  // #### Security Note
+  // Protect this signed URL as you would an access token or password credentials - do not write
+  // it to disk, do not pass it to a third party, and only pass it through a secure HTTPS
+  // encrypted transport.
+  //
+  rpc CreateSsoEmbedUrl(CreateSsoEmbedUrlRequest) returns (CreateSsoEmbedUrlResponse);
+
+  // ### Create an Embed URL
+  //
+  // Creates an embed URL that runs as the Looker user making this API call. ("Embed as me")
+  // This embed URL can then be used to instantiate a Looker embed session in a
+  // "Powered by Looker" (PBL) web application.
+  //
+  // This is similar to Private Embedding (https://docs.looker.com/r/admin/embed/private-embed). Instead of
+  // logging into the Web UI to authenticate, the user has already authenticated against the API to be able to
+  // make this call. However, unlike Private Embed where the user has access to any other part of the Looker UI,
+  // the embed web session created by requesting the EmbedUrlResponse.url in a browser only has access to
+  // content visible under the `/embed` context.
+  //
+  // An embed URL can only be used once, and must be used within 5 minutes of being created. After it
+  // has been used to request a page from the Looker server, the URL is invalid. Future requests using
+  // the same URL will fail. This is to prevent 'replay attacks'.
+  //
+  // The `target_url` property must be a complete URL of a Looker Embedded UI page - scheme, hostname, path starting with "/embed" and query params.
+  // To load a dashboard with id 56 and with a filter of `Date=1 years`, the looker Embed URL would look like `https://myname.looker.com/embed/dashboards/56?Date=1%20years`.
+  // The best way to obtain this target_url is to navigate to the desired Looker page in your web browser,
+  // copy the URL shown in the browser address bar, insert "/embed" after the host/port, and paste it into the `target_url` property as a quoted string value in this API request.
+  //
+  // #### Security Note
+  // Protect this embed URL as you would an access token or password credentials - do not write
+  // it to disk, do not pass it to a third party, and only pass it through a secure HTTPS
+  // encrypted transport.
+  //
+  rpc CreateEmbedUrlAsMe(CreateEmbedUrlAsMeRequest) returns (CreateEmbedUrlAsMeResponse);
+
+  // ### Get the LDAP configuration.
+  //
+  // Looker can be optionally configured to authenticate users against an Active Directory or other LDAP directory server.
+  // LDAP setup requires coordination with an administrator of that directory server.
+  //
+  // Only Looker administrators can read and update the LDAP configuration.
+  //
+  // Configuring LDAP impacts authentication for all users. This configuration should be done carefully.
+  //
+  // Looker maintains a single LDAP configuration. It can be read and updated. Updates only succeed if the new state will be valid (in the sense that all required fields are populated; it is up to you to ensure that the configuration is appropriate and correct).
+  //
+  // LDAP is enabled or disabled for Looker using the **enabled** field.
+  //
+  // Looker will never return an **auth_password** field. That value can be set, but never retrieved.
+  //
+  // See the [Looker LDAP docs](https://www.looker.com/docs/r/api/ldap_setup) for additional information.
+  //
+  rpc LdapConfig(LdapConfigRequest) returns (LdapConfigResponse);
+
+  // ### Update the LDAP configuration.
+  //
+  // Configuring LDAP impacts authentication for all users. This configuration should be done carefully.
+  //
+  // Only Looker administrators can read and update the LDAP configuration.
+  //
+  // LDAP is enabled or disabled for Looker using the **enabled** field.
+  //
+  // It is **highly** recommended that any LDAP setting changes be tested using the APIs below before being set globally.
+  //
+  // See the [Looker LDAP docs](https://www.looker.com/docs/r/api/ldap_setup) for additional information.
+  //
+  rpc UpdateLdapConfig(UpdateLdapConfigRequest) returns (UpdateLdapConfigResponse);
+
+  // ### Test the connection settings for an LDAP configuration.
+  //
+  // This tests that the connection is possible given a connection_host and connection_port.
+  //
+  // **connection_host** and **connection_port** are required. **connection_tls** is optional.
+ //
+ // Example:
+ // ```json
+ // {
+ //   "connection_host": "ldap.example.com",
+ //   "connection_port": "636",
+ //   "connection_tls": true
+ // }
+ // ```
+ //
+ // No authentication to the LDAP server is attempted.
+ //
+ // The active LDAP settings are not modified.
+ //
+ rpc TestLdapConfigConnection(TestLdapConfigConnectionRequest) returns (TestLdapConfigConnectionResponse);
+
+ // ### Test the connection authentication settings for an LDAP configuration.
+ //
+ // This tests that the connection is possible and that a 'server' account to be used by Looker can authenticate to the LDAP server given connection and authentication information.
+ //
+ // **connection_host**, **connection_port**, and **auth_username** are required. **connection_tls** and **auth_password** are optional.
+ //
+ // Example:
+ // ```json
+ // {
+ //   "connection_host": "ldap.example.com",
+ //   "connection_port": "636",
+ //   "connection_tls": true,
+ //   "auth_username": "cn=looker,dc=example,dc=com",
+ //   "auth_password": "secret"
+ // }
+ // ```
+ //
+ // Looker will never return an **auth_password**. If this request omits the **auth_password** field, then the **auth_password** value from the active config (if present) will be used for the test.
+ //
+ // The active LDAP settings are not modified.
+ //
+ //
+ rpc TestLdapConfigAuth(TestLdapConfigAuthRequest) returns (TestLdapConfigAuthResponse);
+
+ // ### Test the user authentication settings for an LDAP configuration without authenticating the user.
+ //
+ // This test will let you easily test the mapping for user properties and roles for any user without needing to authenticate as that user.
+ //
+ // This test accepts a full LDAP configuration along with a username and attempts to find the full info for the user from the LDAP server without actually authenticating the user. So, user password is not required. The configuration is validated before attempting to contact the server.
+ //
+ // **test_ldap_user** is required.
+ //
+ // The active LDAP settings are not modified.
+ //
+ //
+ rpc TestLdapConfigUserInfo(TestLdapConfigUserInfoRequest) returns (TestLdapConfigUserInfoResponse);
+
+ // ### Test the user authentication settings for an LDAP configuration.
+ //
+ // This test accepts a full LDAP configuration along with a username/password pair and attempts to authenticate the user with the LDAP server. The configuration is validated before attempting the authentication.
+ //
+ // Looker will never return an **auth_password**. If this request omits the **auth_password** field, then the **auth_password** value from the active config (if present) will be used for the test.
+ //
+ // **test_ldap_user** and **test_ldap_password** are required.
+ //
+ // The active LDAP settings are not modified.
+ //
+ //
+ rpc TestLdapConfigUserAuth(TestLdapConfigUserAuthRequest) returns (TestLdapConfigUserAuthResponse);
+
+ // ### List All OAuth Client Apps
+ //
+ // Lists all applications registered to use OAuth2 login with this Looker instance, including
+ // enabled and disabled apps.
+ //
+ // Results are filtered to include only the apps that the caller (current user)
+ // has permission to see.
+ //
+ rpc AllOauthClientApps(AllOauthClientAppsRequest) returns (AllOauthClientAppsResponse);
+
+ // ### Get Oauth Client App
+ //
+ // Returns the registered app client with matching client_guid.
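+ //
+ // Illustrative request shape only - this assumes the generated OauthClientAppRequest message
+ // mirrors the REST parameter of the same name; the example is not part of the upstream API docs:
+ // ```json
+ // {
+ //   "client_guid": "my_registered_app"
+ // }
+ // ```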
+ //
+ rpc OauthClientApp(OauthClientAppRequest) returns (OauthClientAppResponse);
+
+ // ### Register an OAuth2 Client App
+ //
+ // Registers details identifying an external web app or native app as an OAuth2 login client of the Looker instance.
+ // The app registration must provide a unique client_guid and redirect_uri that the app will present
+ // in OAuth login requests. If the client_guid and redirect_uri parameters in the login request do not match
+ // the app details registered with the Looker instance, the request is assumed to be a forgery and is rejected.
+ //
+ rpc RegisterOauthClientApp(RegisterOauthClientAppRequest) returns (RegisterOauthClientAppResponse);
+
+ // ### Update OAuth2 Client App Details
+ //
+ // Modifies the details of a previously registered OAuth2 login client app.
+ //
+ rpc UpdateOauthClientApp(UpdateOauthClientAppRequest) returns (UpdateOauthClientAppResponse);
+
+ // ### Delete OAuth Client App
+ //
+ // Deletes the registration info of the app with the matching client_guid.
+ // All active sessions and tokens issued for this app will immediately become invalid.
+ //
+ // ### Note: this deletion cannot be undone.
+ //
+ rpc DeleteOauthClientApp(DeleteOauthClientAppRequest) returns (DeleteOauthClientAppResponse);
+
+ // ### Invalidate All Issued Tokens
+ //
+ // Immediately invalidates all auth codes, sessions, access tokens and refresh tokens issued for
+ // this app for ALL USERS of this app.
+ //
+ rpc InvalidateTokens(InvalidateTokensRequest) returns (InvalidateTokensResponse);
+
+ // ### Activate an app for a user
+ //
+ // Activates a user for a given oauth client app. This indicates the user has been informed that
+ // the app will have access to the user's Looker data, and that the user has accepted and allowed
+ // the app to use their Looker account.
+ //
+ // Activating a user for an app that the user is already activated with returns a success response.
+ //
+ rpc ActivateAppUser(ActivateAppUserRequest) returns (ActivateAppUserResponse);
+
+ // ### Deactivate an app for a user
+ //
+ // Deactivate a user for a given oauth client app. All tokens issued to the app for
+ // this user will be invalid immediately. Before the user can use the app with their
+ // Looker account, the user will have to read and accept an account use disclosure statement for the app.
+ //
+ // Admin users can deactivate other users, but non-admin users can only deactivate themselves.
+ //
+ // As with most REST DELETE operations, this endpoint does not return an error if the indicated
+ // resource (app or user) does not exist or has already been deactivated.
+ //
+ rpc DeactivateAppUser(DeactivateAppUserRequest) returns (DeactivateAppUserResponse);
+
+ // ### Get the OIDC configuration.
+ //
+ // Looker can be optionally configured to authenticate users against an OpenID Connect (OIDC)
+ // authentication server. OIDC setup requires coordination with an administrator of that server.
+ //
+ // Only Looker administrators can read and update the OIDC configuration.
+ //
+ // Configuring OIDC impacts authentication for all users. This configuration should be done carefully.
+ //
+ // Looker maintains a single OIDC configuration. It can be read and updated. Updates only succeed if the new state will be valid (in the sense that all required fields are populated); it is up to you to ensure that the configuration is appropriate and correct.
+ //
+ // OIDC is enabled or disabled for Looker using the **enabled** field.
+ //
+ rpc OidcConfig(OidcConfigRequest) returns (OidcConfigResponse);
+
+ // ### Update the OIDC configuration.
+ //
+ // Configuring OIDC impacts authentication for all users. This configuration should be done carefully.
+ //
+ // Only Looker administrators can read and update the OIDC configuration.
+ //
+ // OIDC is enabled or disabled for Looker using the **enabled** field.
+ //
+ // It is **highly** recommended that any OIDC setting changes be tested using the APIs below before being set globally.
+ //
+ rpc UpdateOidcConfig(UpdateOidcConfigRequest) returns (UpdateOidcConfigResponse);
+
+ // ### Get an OIDC test configuration by test_slug.
+ //
+ rpc OidcTestConfig(OidcTestConfigRequest) returns (OidcTestConfigResponse);
+
+ // ### Delete an OIDC test configuration.
+ //
+ rpc DeleteOidcTestConfig(DeleteOidcTestConfigRequest) returns (DeleteOidcTestConfigResponse);
+
+ // ### Create an OIDC test configuration.
+ //
+ rpc CreateOidcTestConfig(CreateOidcTestConfigRequest) returns (CreateOidcTestConfigResponse);
+
+ // ### Get password config.
+ //
+ rpc PasswordConfig(PasswordConfigRequest) returns (PasswordConfigResponse);
+
+ // ### Update password config.
+ //
+ rpc UpdatePasswordConfig(UpdatePasswordConfigRequest) returns (UpdatePasswordConfigResponse);
+
+ // ### Force all credentials_email users to reset their login passwords upon their next login.
+ //
+ rpc ForcePasswordResetAtNextLoginForAllUsers(ForcePasswordResetAtNextLoginForAllUsersRequest) returns (ForcePasswordResetAtNextLoginForAllUsersResponse);
+
+ // ### Get the SAML configuration.
+ //
+ // Looker can be optionally configured to authenticate users against a SAML authentication server.
+ // SAML setup requires coordination with an administrator of that server.
+ //
+ // Only Looker administrators can read and update the SAML configuration.
+ //
+ // Configuring SAML impacts authentication for all users. This configuration should be done carefully.
+ //
+ // Looker maintains a single SAML configuration. It can be read and updated. Updates only succeed if the new state will be valid (in the sense that all required fields are populated); it is up to you to ensure that the configuration is appropriate and correct.
+ //
+ // SAML is enabled or disabled for Looker using the **enabled** field.
+ //
+ rpc SamlConfig(SamlConfigRequest) returns (SamlConfigResponse);
+
+ // ### Update the SAML configuration.
+ //
+ // Configuring SAML impacts authentication for all users. This configuration should be done carefully.
+ //
+ // Only Looker administrators can read and update the SAML configuration.
+ //
+ // SAML is enabled or disabled for Looker using the **enabled** field.
+ //
+ // It is **highly** recommended that any SAML setting changes be tested using the APIs below before being set globally.
+ //
+ rpc UpdateSamlConfig(UpdateSamlConfigRequest) returns (UpdateSamlConfigResponse);
+
+ // ### Get a SAML test configuration by test_slug.
+ //
+ rpc SamlTestConfig(SamlTestConfigRequest) returns (SamlTestConfigResponse);
+
+ // ### Delete a SAML test configuration.
+ //
+ rpc DeleteSamlTestConfig(DeleteSamlTestConfigRequest) returns (DeleteSamlTestConfigResponse);
+
+ // ### Create a SAML test configuration.
+ //
+ rpc CreateSamlTestConfig(CreateSamlTestConfigRequest) returns (CreateSamlTestConfigResponse);
+
+ // ### Parse the given xml as a SAML IdP metadata document and return the result.
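+ //
+ // Illustrative request shape only - it assumes the generated ParseSamlIdpMetadataRequest message
+ // carries the raw metadata XML in a single `body` string field (an assumption, not upstream documentation):
+ // ```json
+ // {
+ //   "body": "<md:EntityDescriptor entityID=\"https://idp.example.com/metadata\"> ... </md:EntityDescriptor>"
+ // }
+ // ```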
+ // + rpc ParseSamlIdpMetadata(ParseSamlIdpMetadataRequest) returns (ParseSamlIdpMetadataResponse); + + // ### Fetch the given url and parse it as a SAML IdP metadata document and return the result. + // Note that this requires that the url be public or at least at a location where the Looker instance + // can fetch it without requiring any special authentication. + // + rpc FetchAndParseSamlIdpMetadata(FetchAndParseSamlIdpMetadataRequest) returns (FetchAndParseSamlIdpMetadataResponse); + + // ### Get session config. + // + rpc SessionConfig(SessionConfigRequest) returns (SessionConfigResponse); + + // ### Update session config. + // + rpc UpdateSessionConfig(UpdateSessionConfigRequest) returns (UpdateSessionConfigResponse); + + // ### Get currently locked-out users. + // + rpc AllUserLoginLockouts(AllUserLoginLockoutsRequest) returns (AllUserLoginLockoutsResponse); + + // ### Search currently locked-out users. + // + rpc SearchUserLoginLockouts(SearchUserLoginLockoutsRequest) returns (SearchUserLoginLockoutsResponse); + + // ### Removes login lockout for the associated user. + // + rpc DeleteUserLoginLockout(DeleteUserLoginLockoutRequest) returns (DeleteUserLoginLockoutResponse); + + + + // Board: Manage Boards + + // ### Get information about all boards. + // + rpc AllBoards(AllBoardsRequest) returns (AllBoardsResponse); + + // ### Create a new board. + // + rpc CreateBoard(CreateBoardRequest) returns (CreateBoardResponse); + + // ### Search Boards + // + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + rpc SearchBoards(SearchBoardsRequest) returns (SearchBoardsResponse); + + // ### Get information about a board. + // + rpc Board(BoardRequest) returns (BoardResponse); + + // ### Update a board definition. + // + rpc UpdateBoard(UpdateBoardRequest) returns (UpdateBoardResponse); + + // ### Delete a board. + // + rpc DeleteBoard(DeleteBoardRequest) returns (DeleteBoardResponse); + + // ### Get information about all board items. + // + rpc AllBoardItems(AllBoardItemsRequest) returns (AllBoardItemsResponse); + + // ### Create a new board item. + // + rpc CreateBoardItem(CreateBoardItemRequest) returns (CreateBoardItemResponse); + + // ### Get information about a board item. + // + rpc BoardItem(BoardItemRequest) returns (BoardItemResponse); + + // ### Update a board item definition. + // + rpc UpdateBoardItem(UpdateBoardItemRequest) returns (UpdateBoardItemResponse); + + // ### Delete a board item. 
+ // + rpc DeleteBoardItem(DeleteBoardItemRequest) returns (DeleteBoardItemResponse); + + // ### Get information about all board sections. + // + rpc AllBoardSections(AllBoardSectionsRequest) returns (AllBoardSectionsResponse); + + // ### Create a new board section. + // + rpc CreateBoardSection(CreateBoardSectionRequest) returns (CreateBoardSectionResponse); + + // ### Get information about a board section. + // + rpc BoardSection(BoardSectionRequest) returns (BoardSectionResponse); + + // ### Update a board section definition. + // + rpc UpdateBoardSection(UpdateBoardSectionRequest) returns (UpdateBoardSectionResponse); + + // ### Delete a board section. + // + rpc DeleteBoardSection(DeleteBoardSectionRequest) returns (DeleteBoardSectionResponse); + + + + // ColorCollection: Manage Color Collections + + // ### Get an array of all existing Color Collections + // Get a **single** color collection by id with [ColorCollection](#!/ColorCollection/color_collection) + // + // Get all **standard** color collections with [ColorCollection](#!/ColorCollection/color_collections_standard) + // + // Get all **custom** color collections with [ColorCollection](#!/ColorCollection/color_collections_custom) + // + // **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + // + // + rpc AllColorCollections(AllColorCollectionsRequest) returns (AllColorCollectionsResponse); + + // ### Create a custom color collection with the specified information + // + // Creates a new custom color collection object, returning the details, including the created id. + // + // **Update** an existing color collection with [Update Color Collection](#!/ColorCollection/update_color_collection) + // + // **Permanently delete** an existing custom color collection with [Delete Color Collection](#!/ColorCollection/delete_color_collection) + // + // **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + // + // + rpc CreateColorCollection(CreateColorCollectionRequest) returns (CreateColorCollectionResponse); + + // ### Get an array of all existing **Custom** Color Collections + // Get a **single** color collection by id with [ColorCollection](#!/ColorCollection/color_collection) + // + // Get all **standard** color collections with [ColorCollection](#!/ColorCollection/color_collections_standard) + // + // **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + // + // + rpc ColorCollectionsCustom(ColorCollectionsCustomRequest) returns (ColorCollectionsCustomResponse); + + // ### Get an array of all existing **Standard** Color Collections + // Get a **single** color collection by id with [ColorCollection](#!/ColorCollection/color_collection) + // + // Get all **custom** color collections with [ColorCollection](#!/ColorCollection/color_collections_custom) + // + // **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + // + // + rpc ColorCollectionsStandard(ColorCollectionsStandardRequest) returns (ColorCollectionsStandardResponse); + + // ### Get the default color collection + // + // Use this to retrieve the default Color Collection. 
+ // + // Set the default color collection with [ColorCollection](#!/ColorCollection/set_default_color_collection) + // + rpc DefaultColorCollection(DefaultColorCollectionRequest) returns (DefaultColorCollectionResponse); + + // ### Set the global default Color Collection by ID + // + // Returns the new specified default Color Collection object. + // **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + // + // + rpc SetDefaultColorCollection(SetDefaultColorCollectionRequest) returns (SetDefaultColorCollectionResponse); + + // ### Get a Color Collection by ID + // + // Use this to retrieve a specific Color Collection. + // Get a **single** color collection by id with [ColorCollection](#!/ColorCollection/color_collection) + // + // Get all **standard** color collections with [ColorCollection](#!/ColorCollection/color_collections_standard) + // + // Get all **custom** color collections with [ColorCollection](#!/ColorCollection/color_collections_custom) + // + // **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + // + // + rpc ColorCollection(ColorCollectionRequest) returns (ColorCollectionResponse); + + // ### Update a custom color collection by id. + // **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + // + // + rpc UpdateColorCollection(UpdateColorCollectionRequest) returns (UpdateColorCollectionResponse); + + // ### Delete a custom color collection by id + // + // This operation permanently deletes the identified **Custom** color collection. + // + // **Standard** color collections cannot be deleted + // + // Because multiple color collections can have the same label, they must be deleted by ID, not name. + // **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + // + // + rpc DeleteColorCollection(DeleteColorCollectionRequest) returns (DeleteColorCollectionResponse); + + + + // Command: Manage Commands + + // ### Get All Commands. + // + rpc GetAllCommands(GetAllCommandsRequest) returns (GetAllCommandsResponse); + + // ### Create a new command. + // # Required fields: [:name, :linked_content_id, :linked_content_type] + // # `linked_content_type` must be one of ["dashboard", "lookml_dashboard"] + // # + // + rpc CreateCommand(CreateCommandRequest) returns (CreateCommandResponse); + + // ### Update an existing custom command. + // # Optional fields: ['name', 'description'] + // # + // + rpc UpdateCommand(UpdateCommandRequest) returns (UpdateCommandResponse); + + // ### Delete an existing custom command. + // + rpc DeleteCommand(DeleteCommandRequest) returns (DeleteCommandResponse); + + + + // Config: Manage General Configuration + + // Get the current Cloud Storage Configuration. + // + rpc CloudStorageConfiguration(CloudStorageConfigurationRequest) returns (CloudStorageConfigurationResponse); + + // Update the current Cloud Storage Configuration. + // + rpc UpdateCloudStorageConfiguration(UpdateCloudStorageConfigurationRequest) returns (UpdateCloudStorageConfigurationResponse); + + // ### Get the current status and content of custom welcome emails + // + rpc CustomWelcomeEmail(CustomWelcomeEmailRequest) returns (CustomWelcomeEmailResponse); + + // Update custom welcome email setting and values. 
+ // Optionally send a test email with the new content to the currently logged in user.
+ //
+ rpc UpdateCustomWelcomeEmail(UpdateCustomWelcomeEmailRequest) returns (UpdateCustomWelcomeEmailResponse);
+
+ // Requests to this endpoint will send a welcome email with the custom content provided in the body to the currently logged in user.
+ //
+ rpc UpdateCustomWelcomeEmailTest(UpdateCustomWelcomeEmailTestRequest) returns (UpdateCustomWelcomeEmailTestResponse);
+
+ // ### Retrieve the value for whether or not digest emails are enabled
+ //
+ rpc DigestEmailsEnabled(DigestEmailsEnabledRequest) returns (DigestEmailsEnabledResponse);
+
+ // ### Update the setting for enabling/disabling digest emails
+ //
+ rpc UpdateDigestEmailsEnabled(UpdateDigestEmailsEnabledRequest) returns (UpdateDigestEmailsEnabledResponse);
+
+ // ### Trigger the generation of digest email records and send them to Looker's internal system. This does not send
+ // any actual emails; it generates records containing content which may be of interest for users who have become inactive.
+ // Emails will be sent at a later time from Looker's internal system if the Digest Emails feature is enabled in settings.
+ rpc CreateDigestEmailSend(CreateDigestEmailSendRequest) returns (CreateDigestEmailSendResponse);
+
+ // ### Set the menu item name and content for internal help resources
+ //
+ rpc InternalHelpResourcesContent(InternalHelpResourcesContentRequest) returns (InternalHelpResourcesContentResponse);
+
+ // Update internal help resources content
+ //
+ rpc UpdateInternalHelpResourcesContent(UpdateInternalHelpResourcesContentRequest) returns (UpdateInternalHelpResourcesContentResponse);
+
+ // ### Get and set the options for internal help resources
+ //
+ rpc InternalHelpResources(InternalHelpResourcesRequest) returns (InternalHelpResourcesResponse);
+
+ // Update internal help resources settings
+ //
+ rpc UpdateInternalHelpResources(UpdateInternalHelpResourcesRequest) returns (UpdateInternalHelpResourcesResponse);
+
+ // ### Get all legacy features.
+ //
+ rpc AllLegacyFeatures(AllLegacyFeaturesRequest) returns (AllLegacyFeaturesResponse);
+
+ // ### Get information about the legacy feature with a specific id.
+ //
+ rpc LegacyFeature(LegacyFeatureRequest) returns (LegacyFeatureResponse);
+
+ // ### Update information about the legacy feature with a specific id.
+ //
+ rpc UpdateLegacyFeature(UpdateLegacyFeatureRequest) returns (UpdateLegacyFeatureResponse);
+
+ // ### Get a list of locales that Looker supports.
+ //
+ rpc AllLocales(AllLocalesRequest) returns (AllLocalesResponse);
+
+ // ### Get a list of timezones that Looker supports (e.g. useful for scheduling tasks).
+ //
+ rpc AllTimezones(AllTimezonesRequest) returns (AllTimezonesResponse);
+
+ // ### Get information about all API versions supported by this Looker instance.
+ //
+ rpc Versions(VersionsRequest) returns (VersionsResponse);
+
+ // ### This feature is enabled only by special license.
+ // ### Gets the whitelabel configuration, which includes hiding documentation links, custom favicon uploading, etc.
+ //
+ rpc WhitelabelConfiguration(WhitelabelConfigurationRequest) returns (WhitelabelConfigurationResponse);
+
+ // ### Update the whitelabel configuration
+ //
+ rpc UpdateWhitelabelConfiguration(UpdateWhitelabelConfigurationRequest) returns (UpdateWhitelabelConfigurationResponse);
+
+
+
+ // Connection: Manage Database Connections
+
+ // ### Get information about all connections.
+ // + rpc AllConnections(AllConnectionsRequest) returns (AllConnectionsResponse); + + // ### Create a connection using the specified configuration. + // + rpc CreateConnection(CreateConnectionRequest) returns (CreateConnectionResponse); + + // ### Get information about a connection. + // + rpc Connection(ConnectionRequest) returns (ConnectionResponse); + + // ### Update a connection using the specified configuration. + // + rpc UpdateConnection(UpdateConnectionRequest) returns (UpdateConnectionResponse); + + // ### Delete a connection. + // + rpc DeleteConnection(DeleteConnectionRequest) returns (DeleteConnectionResponse); + + // ### Delete a connection override. + // + rpc DeleteConnectionOverride(DeleteConnectionOverrideRequest) returns (DeleteConnectionOverrideResponse); + + // ### Test an existing connection. + // + // Note that a connection's 'dialect' property has a 'connection_tests' property that lists the + // specific types of tests that the connection supports. + // + // This API is rate limited. + // + // Unsupported tests in the request will be ignored. + // + rpc TestConnection(TestConnectionRequest) returns (TestConnectionResponse); + + // ### Test a connection configuration. + // + // Note that a connection's 'dialect' property has a 'connection_tests' property that lists the + // specific types of tests that the connection supports. + // + // This API is rate limited. + // + // Unsupported tests in the request will be ignored. + // + rpc TestConnectionConfig(TestConnectionConfigRequest) returns (TestConnectionConfigResponse); + + // ### Get information about all dialects. + // + rpc AllDialectInfos(AllDialectInfosRequest) returns (AllDialectInfosResponse); + + // ### Get all External OAuth Applications. + // + rpc AllExternalOauthApplications(AllExternalOauthApplicationsRequest) returns (AllExternalOauthApplicationsResponse); + + // ### Create an OAuth Application using the specified configuration. + // + rpc CreateExternalOauthApplication(CreateExternalOauthApplicationRequest) returns (CreateExternalOauthApplicationResponse); + + // ### Get information about all SSH Servers. + // + rpc AllSshServers(AllSshServersRequest) returns (AllSshServersResponse); + + // ### Create an SSH Server. + // + rpc CreateSshServer(CreateSshServerRequest) returns (CreateSshServerResponse); + + // ### Get information about an SSH Server. + // + rpc SshServer(SshServerRequest) returns (SshServerResponse); + + // ### Update an SSH Server. + // + rpc UpdateSshServer(UpdateSshServerRequest) returns (UpdateSshServerResponse); + + // ### Delete an SSH Server. + // + rpc DeleteSshServer(DeleteSshServerRequest) returns (DeleteSshServerResponse); + + // ### Test the SSH Server + // + rpc TestSshServer(TestSshServerRequest) returns (TestSshServerResponse); + + // ### Get information about all SSH Tunnels. + // + rpc AllSshTunnels(AllSshTunnelsRequest) returns (AllSshTunnelsResponse); + + // ### Create an SSH Tunnel + // + rpc CreateSshTunnel(CreateSshTunnelRequest) returns (CreateSshTunnelResponse); + + // ### Get information about an SSH Tunnel. 
+ // + rpc SshTunnel(SshTunnelRequest) returns (SshTunnelResponse); + + // ### Update an SSH Tunnel + // + rpc UpdateSshTunnel(UpdateSshTunnelRequest) returns (UpdateSshTunnelResponse); + + // ### Delete an SSH Tunnel + // + rpc DeleteSshTunnel(DeleteSshTunnelRequest) returns (DeleteSshTunnelResponse); + + // ### Test the SSH Tunnel + // + rpc TestSshTunnel(TestSshTunnelRequest) returns (TestSshTunnelResponse); + + // ### Get the SSH public key + // + // Get the public key created for this instance to identify itself to a remote SSH server. + // + rpc SshPublicKey(SshPublicKeyRequest) returns (SshPublicKeyResponse); + + + + // Content: Manage Content + + // ### Search Favorite Content + // + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + rpc SearchContentFavorites(SearchContentFavoritesRequest) returns (SearchContentFavoritesResponse); + + // ### Get favorite content by its id + rpc ContentFavorite(ContentFavoriteRequest) returns (ContentFavoriteResponse); + + // ### Delete favorite content + rpc DeleteContentFavorite(DeleteContentFavoriteRequest) returns (DeleteContentFavoriteResponse); + + // ### Create favorite content + rpc CreateContentFavorite(CreateContentFavoriteRequest) returns (CreateContentFavoriteResponse); + + // ### Get information about all content metadata in a space. + // + rpc AllContentMetadatas(AllContentMetadatasRequest) returns (AllContentMetadatasResponse); + + // ### Get information about an individual content metadata record. + // + rpc ContentMetadata(ContentMetadataRequest) returns (ContentMetadataResponse); + + // ### Move a piece of content. + // + rpc UpdateContentMetadata(UpdateContentMetadataRequest) returns (UpdateContentMetadataResponse); + + // ### All content metadata access records for a content metadata item. + // + rpc AllContentMetadataAccesses(AllContentMetadataAccessesRequest) returns (AllContentMetadataAccessesResponse); + + // ### Create content metadata access. + // + rpc CreateContentMetadataAccess(CreateContentMetadataAccessRequest) returns (CreateContentMetadataAccessResponse); + + // ### Update type of access for content metadata. + // + rpc UpdateContentMetadataAccess(UpdateContentMetadataAccessRequest) returns (UpdateContentMetadataAccessResponse); + + // ### Remove content metadata access. 
+ //
+ rpc DeleteContentMetadataAccess(DeleteContentMetadataAccessRequest) returns (DeleteContentMetadataAccessResponse);
+
+ // ### Get an image representing the contents of a dashboard or look.
+ //
+ // The returned thumbnail is an abstract representation of the contents of a dashboard or look and does not
+ // reflect the actual data displayed in the respective visualizations.
+ //
+ rpc ContentThumbnail(ContentThumbnailRequest) returns (ContentThumbnailResponse);
+
+ // ### Validate All Content
+ //
+ // Performs validation of all looks and dashboards.
+ // Returns a list of errors found as well as metadata about the content validation run.
+ //
+ rpc ContentValidation(ContentValidationRequest) returns (ContentValidationResponse);
+
+ // ### Search Content Views
+ //
+ // If multiple search params are given and `filter_or` is FALSE or not specified,
+ // search params are combined in a logical AND operation.
+ // Only rows that match *all* search param criteria will be returned.
+ //
+ // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation.
+ // Results will include rows that match **any** of the search criteria.
+ //
+ // String search params use case-insensitive matching.
+ // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions.
+ // example="dan%" will match "danger" and "Danzig" but not "David"
+ // example="D_m%" will match "Damage" and "dump"
+ //
+ // Integer search params can accept a single value or a comma separated list of values. The multiple
+ // values will be combined under a logical OR operation - results will match at least one of
+ // the given values.
+ //
+ // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match
+ // or exclude (respectively) rows where the column is null.
+ //
+ // Boolean search params accept only "true" and "false" as values.
+ //
+ //
+ rpc SearchContentViews(SearchContentViewsRequest) returns (SearchContentViewsResponse);
+
+ // ### Get a vector image representing the contents of a dashboard or look.
+ //
+ // # DEPRECATED: Use [content_thumbnail()](#!/Content/content_thumbnail)
+ //
+ // The returned thumbnail is an abstract representation of the contents of a dashboard or look and does not
+ // reflect the actual data displayed in the respective visualizations.
+ //
+ rpc VectorThumbnail(VectorThumbnailRequest) returns (VectorThumbnailResponse);
+
+
+
+ // Dashboard: Manage Dashboards
+
+ // ### Get information about all active dashboards.
+ //
+ // Returns an array of **abbreviated dashboard objects**. Dashboards marked as deleted are excluded from this list.
+ //
+ // Get the **full details** of a specific dashboard by id with [dashboard()](#!/Dashboard/dashboard)
+ //
+ // Find **deleted dashboards** with [search_dashboards()](#!/Dashboard/search_dashboards)
+ //
+ rpc AllDashboards(AllDashboardsRequest) returns (AllDashboardsResponse);
+
+ // ### Create a new dashboard
+ //
+ // Creates a new dashboard object and returns the details of the newly created dashboard.
+ //
+ // `Title`, `user_id`, and `space_id` are all required fields.
+ // `Space_id` and `user_id` must contain the id of an existing space or user, respectively.
+ // A dashboard's `title` must be unique within the space in which it resides.
+ //
+ // If you receive a 422 error response when creating a dashboard, be sure to look at the
+ // response body for information about exactly which fields are missing or contain invalid data.
+ //
+ // You can **update** an existing dashboard with [update_dashboard()](#!/Dashboard/update_dashboard)
+ //
+ // You can **permanently delete** an existing dashboard with [delete_dashboard()](#!/Dashboard/delete_dashboard)
+ //
+ rpc CreateDashboard(CreateDashboardRequest) returns (CreateDashboardResponse);
+
+ // ### Search Dashboards
+ //
+ // Returns an **array of dashboard objects** that match the specified search criteria.
+ //
+ // If multiple search params are given and `filter_or` is FALSE or not specified,
+ // search params are combined in a logical AND operation.
+ // Only rows that match *all* search param criteria will be returned.
+ //
+ // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation.
+ // Results will include rows that match **any** of the search criteria.
+ //
+ // String search params use case-insensitive matching.
+ // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions.
+ // example="dan%" will match "danger" and "Danzig" but not "David"
+ // example="D_m%" will match "Damage" and "dump"
+ //
+ // Integer search params can accept a single value or a comma separated list of values. The multiple
+ // values will be combined under a logical OR operation - results will match at least one of
+ // the given values.
+ //
+ // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match
+ // or exclude (respectively) rows where the column is null.
+ //
+ // Boolean search params accept only "true" and "false" as values.
+ //
+ //
+ // The parameters `limit` and `offset` are recommended for fetching results in page-size chunks.
+ //
+ // Get a **single dashboard** by id with [dashboard()](#!/Dashboard/dashboard)
+ //
+ rpc SearchDashboards(SearchDashboardsRequest) returns (SearchDashboardsResponse);
+
+ // ### Import a LookML dashboard to a space as a UDD
+ // Creates a UDD (a dashboard which exists in the Looker database rather than as a LookML file) from the LookML dashboard
+ // and puts it in the space specified. The created UDD will have a lookml_link_id which links to the original LookML dashboard.
+ //
+ // To give the imported dashboard a different title, specify it (e.g. title: "my title") in the body of your request; otherwise the imported
+ // dashboard will have the same title as the original LookML dashboard.
+ //
+ // For this operation to succeed the user must have permission to see the LookML dashboard in question, and have permission to
+ // create content in the space the dashboard is being imported to.
+ //
+ // **Sync** a linked UDD with [sync_lookml_dashboard()](#!/Dashboard/sync_lookml_dashboard)
+ // **Unlink** a linked UDD by setting lookml_link_id to null with [update_dashboard()](#!/Dashboard/update_dashboard)
+ //
+ rpc ImportLookmlDashboard(ImportLookmlDashboardRequest) returns (ImportLookmlDashboardResponse);
+
+ // ### Update all linked dashboards to match the specified LookML dashboard.
+ //
+ // Any UDD (a dashboard which exists in the Looker database rather than as a LookML file) which has a `lookml_link_id`
+ // property value referring to a LookML dashboard's id (model::dashboardname) will be updated so that it matches the current state of the LookML dashboard.
+ //
+ // For this operation to succeed the user must have permission to view the LookML dashboard, and only linked dashboards
+ // that the user has permission to update will be synced.
+ //
+ // To **link** or **unlink** a UDD set the `lookml_link_id` property with [update_dashboard()](#!/Dashboard/update_dashboard)
+ //
+ rpc SyncLookmlDashboard(SyncLookmlDashboardRequest) returns (SyncLookmlDashboardResponse);
+
+ // ### Get information about a dashboard
+ //
+ // Returns the full details of the identified dashboard object
+ //
+ // Get a **summary list** of all active dashboards with [all_dashboards()](#!/Dashboard/all_dashboards)
+ //
+ // You can **Search** for dashboards with [search_dashboards()](#!/Dashboard/search_dashboards)
+ //
+ rpc Dashboard(DashboardRequest) returns (DashboardResponse);
+
+ // ### Update a dashboard
+ //
+ // You can use this function to change the string and integer properties of
+ // a dashboard. Nested objects such as filters, dashboard elements, or dashboard layout components
+ // cannot be modified by this function - use the update functions for the respective
+ // nested object types (like [update_dashboard_filter()](#!/3.1/Dashboard/update_dashboard_filter) to change a filter)
+ // to modify nested objects referenced by a dashboard.
+ //
+ // If you receive a 422 error response when updating a dashboard, be sure to look at the
+ // response body for information about exactly which fields are missing or contain invalid data.
+ //
+ rpc UpdateDashboard(UpdateDashboardRequest) returns (UpdateDashboardResponse);
+
+ // ### Delete the dashboard with the specified id
+ //
+ // Permanently **deletes** a dashboard. (The dashboard cannot be recovered after this operation.)
+ //
+ // "Soft" delete or hide a dashboard by setting its `deleted` status to `True` with [update_dashboard()](#!/Dashboard/update_dashboard).
+ //
+ // Note: When a dashboard is deleted in the UI, it is soft deleted. Use this API call to permanently remove it, if desired.
+ //
+ rpc DeleteDashboard(DeleteDashboardRequest) returns (DeleteDashboardResponse);
+
+ // ### Get Aggregate Table LookML for Each Query on a Dashboard
+ //
+ // Returns a JSON object that contains the dashboard id and Aggregate Table lookml
+ //
+ //
+ rpc DashboardAggregateTableLookml(DashboardAggregateTableLookmlRequest) returns (DashboardAggregateTableLookmlResponse);
+
+ // ### Get lookml of a UDD
+ //
+ // Returns a JSON object that contains the dashboard id and the full lookml
+ //
+ //
+ rpc DashboardLookml(DashboardLookmlRequest) returns (DashboardLookmlResponse);
+
+ // ### Search Dashboard Elements
+ //
+ // Returns an **array of DashboardElement objects** that match the specified search criteria.
+ //
+ // If multiple search params are given and `filter_or` is FALSE or not specified,
+ // search params are combined in a logical AND operation.
+ // Only rows that match *all* search param criteria will be returned.
+ //
+ // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation.
+ // Results will include rows that match **any** of the search criteria.
+ //
+ // String search params use case-insensitive matching.
+ // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions.
+ // example="dan%" will match "danger" and "Danzig" but not "David"
+ // example="D_m%" will match "Damage" and "dump"
+ //
+ // Integer search params can accept a single value or a comma separated list of values. The multiple
+ // values will be combined under a logical OR operation - results will match at least one of
+ // the given values.
+ // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + rpc SearchDashboardElements(SearchDashboardElementsRequest) returns (SearchDashboardElementsResponse); + + // ### Get information about the dashboard element with a specific id. + rpc DashboardElement(DashboardElementRequest) returns (DashboardElementResponse); + + // ### Update the dashboard element with a specific id. + rpc UpdateDashboardElement(UpdateDashboardElementRequest) returns (UpdateDashboardElementResponse); + + // ### Delete a dashboard element with a specific id. + rpc DeleteDashboardElement(DeleteDashboardElementRequest) returns (DeleteDashboardElementResponse); + + // ### Get information about all the dashboard elements on a dashboard with a specific id. + rpc DashboardDashboardElements(DashboardDashboardElementsRequest) returns (DashboardDashboardElementsResponse); + + // ### Create a dashboard element on the dashboard with a specific id. + rpc CreateDashboardElement(CreateDashboardElementRequest) returns (CreateDashboardElementResponse); + + // ### Get information about the dashboard filters with a specific id. + rpc DashboardFilter(DashboardFilterRequest) returns (DashboardFilterResponse); + + // ### Update the dashboard filter with a specific id. + rpc UpdateDashboardFilter(UpdateDashboardFilterRequest) returns (UpdateDashboardFilterResponse); + + // ### Delete a dashboard filter with a specific id. + rpc DeleteDashboardFilter(DeleteDashboardFilterRequest) returns (DeleteDashboardFilterResponse); + + // ### Get information about all the dashboard filters on a dashboard with a specific id. + rpc DashboardDashboardFilters(DashboardDashboardFiltersRequest) returns (DashboardDashboardFiltersResponse); + + // ### Create a dashboard filter on the dashboard with a specific id. + rpc CreateDashboardFilter(CreateDashboardFilterRequest) returns (CreateDashboardFilterResponse); + + // ### Get information about the dashboard elements with a specific id. + rpc DashboardLayoutComponent(DashboardLayoutComponentRequest) returns (DashboardLayoutComponentResponse); + + // ### Update the dashboard element with a specific id. + rpc UpdateDashboardLayoutComponent(UpdateDashboardLayoutComponentRequest) returns (UpdateDashboardLayoutComponentResponse); + + // ### Get information about all the dashboard layout components for a dashboard layout with a specific id. + rpc DashboardLayoutDashboardLayoutComponents(DashboardLayoutDashboardLayoutComponentsRequest) returns (DashboardLayoutDashboardLayoutComponentsResponse); + + // ### Get information about the dashboard layouts with a specific id. + rpc DashboardLayout(DashboardLayoutRequest) returns (DashboardLayoutResponse); + + // ### Update the dashboard layout with a specific id. + rpc UpdateDashboardLayout(UpdateDashboardLayoutRequest) returns (UpdateDashboardLayoutResponse); + + // ### Delete a dashboard layout with a specific id. + rpc DeleteDashboardLayout(DeleteDashboardLayoutRequest) returns (DeleteDashboardLayoutResponse); + + // ### Get information about all the dashboard elements on a dashboard with a specific id. + rpc DashboardDashboardLayouts(DashboardDashboardLayoutsRequest) returns (DashboardDashboardLayoutsResponse); + + // ### Create a dashboard layout on the dashboard with a specific id. 
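+ //
+ // Illustrative request shape only - the `body` wrapper and `dashboard_id` field are assumptions
+ // based on the description above; the authoritative shape is the generated CreateDashboardLayoutRequest message:
+ // ```json
+ // {
+ //   "body": {
+ //     "dashboard_id": "123"
+ //   }
+ // }
+ // ```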
+ rpc CreateDashboardLayout(CreateDashboardLayoutRequest) returns (CreateDashboardLayoutResponse); + + + + // DataAction: Run Data Actions + + // Perform a data action. The data action object can be obtained from query results, and used to perform an arbitrary action. + rpc PerformDataAction(PerformDataActionRequest) returns (PerformDataActionResponse); + + // For some data actions, the remote server may supply a form requesting further user input. This endpoint takes a data action, asks the remote server to generate a form for it, and returns that form to you for presentation to the user. + rpc FetchRemoteDataActionForm(FetchRemoteDataActionFormRequest) returns (FetchRemoteDataActionFormResponse); + + + + // Datagroup: Manage Datagroups + + // ### Get information about all datagroups. + // + rpc AllDatagroups(AllDatagroupsRequest) returns (AllDatagroupsResponse); + + // ### Get information about a datagroup. + // + rpc Datagroup(DatagroupRequest) returns (DatagroupResponse); + + // ### Update a datagroup using the specified params. + // + rpc UpdateDatagroup(UpdateDatagroupRequest) returns (UpdateDatagroupResponse); + + + + // Folder: Manage Folders + + // Search for folders by creator id, parent id, name, etc + rpc SearchFolders(SearchFoldersRequest) returns (SearchFoldersResponse); + + // ### Get information about the folder with a specific id. + rpc Folder(FolderRequest) returns (FolderResponse); + + // ### Update the folder with a specific id. + rpc UpdateFolder(UpdateFolderRequest) returns (UpdateFolderResponse); + + // ### Delete the folder with a specific id including any children folders. + // **DANGER** this will delete all looks and dashboards in the folder. + // + rpc DeleteFolder(DeleteFolderRequest) returns (DeleteFolderResponse); + + // ### Get information about all folders. + // + // In API 3.x, this will not return empty personal folders, unless they belong to the calling user. + // In API 4.0+, all personal folders will be returned. + // + // + rpc AllFolders(AllFoldersRequest) returns (AllFoldersResponse); + + // ### Create a folder with specified information. + // + // Caller must have permission to edit the parent folder and to create folders, otherwise the request + // returns 404 Not Found. + // + rpc CreateFolder(CreateFolderRequest) returns (CreateFolderResponse); + + // ### Get the children of a folder. + rpc FolderChildren(FolderChildrenRequest) returns (FolderChildrenResponse); + + // ### Search the children of a folder + rpc FolderChildrenSearch(FolderChildrenSearchRequest) returns (FolderChildrenSearchResponse); + + // ### Get the parent of a folder + rpc FolderParent(FolderParentRequest) returns (FolderParentResponse); + + // ### Get the ancestors of a folder + rpc FolderAncestors(FolderAncestorsRequest) returns (FolderAncestorsResponse); + + // ### Get all looks in a folder. + // In API 3.x, this will return all looks in a folder, including looks in the trash. + // In API 4.0+, all looks in a folder will be returned, excluding looks in the trash. + // + rpc FolderLooks(FolderLooksRequest) returns (FolderLooksResponse); + + // ### Get the dashboards in a folder + rpc FolderDashboards(FolderDashboardsRequest) returns (FolderDashboardsResponse); + + + + // Group: Manage Groups + + // ### Get information about all groups. + // + rpc AllGroups(AllGroupsRequest) returns (AllGroupsResponse); + + // ### Creates a new group (admin only). 
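+ //
+ // Illustrative request shape only - `body`, `name`, and `fields` are assumptions about the
+ // generated CreateGroupRequest message, not upstream documentation:
+ // ```json
+ // {
+ //   "body": {
+ //     "name": "Data Analysts"
+ //   },
+ //   "fields": "id,name"
+ // }
+ // ```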
+ // + rpc CreateGroup(CreateGroupRequest) returns (CreateGroupResponse); + + // ### Search groups + // + // Returns all group records that match the given search criteria. + // + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + rpc SearchGroups(SearchGroupsRequest) returns (SearchGroupsResponse); + + // ### Search groups include roles + // + // Returns all group records that match the given search criteria, and attaches any associated roles. + // + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + rpc SearchGroupsWithRoles(SearchGroupsWithRolesRequest) returns (SearchGroupsWithRolesResponse); + + // ### Search groups include hierarchy + // + // Returns all group records that match the given search criteria, and attaches + // associated role_ids and parent group_ids. + // + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. 
+ // example="dan%" will match "danger" and "Danzig" but not "David"
+ // example="D_m%" will match "Damage" and "dump"
+ //
+ // Integer search params can accept a single value or a comma separated list of values. The multiple
+ // values will be combined under a logical OR operation - results will match at least one of
+ // the given values.
+ //
+ // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match
+ // or exclude (respectively) rows where the column is null.
+ //
+ // Boolean search params accept only "true" and "false" as values.
+ //
+ //
+ rpc SearchGroupsWithHierarchy(SearchGroupsWithHierarchyRequest) returns (SearchGroupsWithHierarchyResponse);
+
+ // ### Get information about a group.
+ //
+ rpc Group(GroupRequest) returns (GroupResponse);
+
+ // ### Updates a group (admin only).
+ rpc UpdateGroup(UpdateGroupRequest) returns (UpdateGroupResponse);
+
+ // ### Deletes a group (admin only).
+ //
+ rpc DeleteGroup(DeleteGroupRequest) returns (DeleteGroupResponse);
+
+ // ### Get information about all the groups in a group
+ //
+ rpc AllGroupGroups(AllGroupGroupsRequest) returns (AllGroupGroupsResponse);
+
+ // ### Adds a new group to a group.
+ //
+ rpc AddGroupGroup(AddGroupGroupRequest) returns (AddGroupGroupResponse);
+
+ // ### Get information about all the users directly included in a group.
+ //
+ rpc AllGroupUsers(AllGroupUsersRequest) returns (AllGroupUsersResponse);
+
+ // ### Adds a new user to a group.
+ //
+ rpc AddGroupUser(AddGroupUserRequest) returns (AddGroupUserResponse);
+
+ // ### Removes a user from a group.
+ //
+ rpc DeleteGroupUser(DeleteGroupUserRequest) returns (DeleteGroupUserResponse);
+
+ // ### Removes a group from a group.
+ //
+ rpc DeleteGroupFromGroup(DeleteGroupFromGroupRequest) returns (DeleteGroupFromGroupResponse);
+
+ // ### Set the value of a user attribute for a group.
+ //
+ // For information about how user attribute values are calculated, see [Set User Attribute Group Values](#!/UserAttribute/set_user_attribute_group_values).
+ //
+ rpc UpdateUserAttributeGroupValue(UpdateUserAttributeGroupValueRequest) returns (UpdateUserAttributeGroupValueResponse);
+
+ // ### Remove a user attribute value from a group.
+ //
+ rpc DeleteUserAttributeGroupValue(DeleteUserAttributeGroupValueRequest) returns (DeleteUserAttributeGroupValueResponse);
+
+
+
+ // Homepage: Manage Homepage
+
+ // ### Get information about the primary homepage's sections.
+ //
+ rpc AllPrimaryHomepageSections(AllPrimaryHomepageSectionsRequest) returns (AllPrimaryHomepageSectionsResponse);
+
+
+
+ // Integration: Manage Integrations
+
+ // ### Get information about all Integration Hubs.
+ //
+ rpc AllIntegrationHubs(AllIntegrationHubsRequest) returns (AllIntegrationHubsResponse);
+
+ // ### Create a new Integration Hub.
+ //
+ // This API is rate limited to prevent it from being used for SSRF attacks
+ //
+ rpc CreateIntegrationHub(CreateIntegrationHubRequest) returns (CreateIntegrationHubResponse);
+
+ // ### Get information about an Integration Hub.
+ //
+ rpc IntegrationHub(IntegrationHubRequest) returns (IntegrationHubResponse);
+
+ // ### Update an Integration Hub definition.
+ //
+ // This API is rate limited to prevent it from being used for SSRF attacks
+ //
+ rpc UpdateIntegrationHub(UpdateIntegrationHubRequest) returns (UpdateIntegrationHubResponse);
+
+ // ### Delete an Integration Hub.
+ //
+ rpc DeleteIntegrationHub(DeleteIntegrationHubRequest) returns (DeleteIntegrationHubResponse);
+
+ // Accepts the legal agreement for a given integration hub. This only works for integration hubs that have legal_agreement_required set to true and legal_agreement_signed set to false.
+ rpc AcceptIntegrationHubLegalAgreement(AcceptIntegrationHubLegalAgreementRequest) returns (AcceptIntegrationHubLegalAgreementResponse);
+
+ // ### Get information about all Integrations.
+ //
+ rpc AllIntegrations(AllIntegrationsRequest) returns (AllIntegrationsResponse);
+
+ // ### Get information about an Integration.
+ //
+ rpc Integration(IntegrationRequest) returns (IntegrationResponse);
+
+ // ### Update parameters on an Integration.
+ //
+ rpc UpdateIntegration(UpdateIntegrationRequest) returns (UpdateIntegrationResponse);
+
+ // Returns the Integration form for presentation to the user.
+ rpc FetchIntegrationForm(FetchIntegrationFormRequest) returns (FetchIntegrationFormResponse);
+
+ // Tests the integration to make sure all the settings are working.
+ rpc TestIntegration(TestIntegrationRequest) returns (TestIntegrationResponse);
+
+
+
+ // Look: Run and Manage Looks
+
+ // ### Get information about all active Looks
+ //
+ // Returns an array of **abbreviated Look objects** describing all the looks that the caller has access to. Soft-deleted Looks are **not** included.
+ //
+ // Get the **full details** of a specific look by id with [look(id)](#!/Look/look)
+ //
+ // Find **soft-deleted looks** with [search_looks()](#!/Look/search_looks)
+ //
+ rpc AllLooks(AllLooksRequest) returns (AllLooksResponse);
+
+ // ### Create a Look
+ //
+ // To create a look to display query data, first create the query with [create_query()](#!/Query/create_query)
+ // then assign the query's id to the `query_id` property in the call to `create_look()`.
+ //
+ // To place the look into a particular space, assign the space's id to the `space_id` property
+ // in the call to `create_look()`.
+ //
+ rpc CreateLook(CreateLookRequest) returns (CreateLookResponse);
+
+ // ### Search Looks
+ //
+ // Returns an **array of Look objects** that match the specified search criteria.
+ //
+ // If multiple search params are given and `filter_or` is FALSE or not specified,
+ // search params are combined in a logical AND operation.
+ // Only rows that match *all* search param criteria will be returned.
+ //
+ // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation.
+ // Results will include rows that match **any** of the search criteria.
+ //
+ // String search params use case-insensitive matching.
+ // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions.
+ // example="dan%" will match "danger" and "Danzig" but not "David"
+ // example="D_m%" will match "Damage" and "dump"
+ //
+ // Integer search params can accept a single value or a comma separated list of values. The multiple
+ // values will be combined under a logical OR operation - results will match at least one of
+ // the given values.
+ //
+ // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match
+ // or exclude (respectively) rows where the column is null.
+ //
+ // Boolean search params accept only "true" and "false" as values.
+ //
+ //
+ // Get a **single look** by id with [look(id)](#!/Look/look)
+ //
+ rpc SearchLooks(SearchLooksRequest) returns (SearchLooksResponse);
+
+ // ### Get a Look.
+ // + // Returns detailed information about a Look and its associated Query. + // + // + rpc Look(LookRequest) returns (LookResponse); + + // ### Modify a Look + // + // Use this function to modify parts of a look. Property values given in a call to `update_look` are + // applied to the existing look, so there's no need to include properties whose values are not changing. + // It's best to specify only the properties you want to change and leave everything else out + // of your `update_look` call. **Look properties marked 'read-only' will be ignored.** + // + // When a user deletes a look in the Looker UI, the look data remains in the database but is + // marked with a deleted flag ("soft-deleted"). Soft-deleted looks can be undeleted (by an admin) + // if the delete was in error. + // + // To soft-delete a look via the API, use [update_look()](#!/Look/update_look) to change the look's `deleted` property to `true`. + // You can undelete a look by calling `update_look` to change the look's `deleted` property to `false`. + // + // Soft-deleted looks are excluded from the results of [all_looks()](#!/Look/all_looks) and [search_looks()](#!/Look/search_looks), so they + // essentially disappear from view even though they still reside in the db. + // In API 3.1 and later, you can pass `deleted: true` as a parameter to [search_looks()](#!/3.1/Look/search_looks) to list soft-deleted looks. + // + // NOTE: [delete_look()](#!/Look/delete_look) performs a "hard delete" - the look data is removed from the Looker + // database and destroyed. There is no "undo" for `delete_look()`. + // + rpc UpdateLook(UpdateLookRequest) returns (UpdateLookResponse); + + // ### Permanently Delete a Look + // + // This operation **permanently** removes a look from the Looker database. + // + // NOTE: There is no "undo" for this kind of delete. + // + // For information about soft-delete (which can be undone) see [update_look()](#!/Look/update_look). + // + rpc DeleteLook(DeleteLookRequest) returns (DeleteLookResponse); + + // ### Run a Look + // + // Runs a given look's query and returns the results in the requested format. + // + // Supported formats: + // + // | result_format | Description + // | :-----------: | :--- | + // | json | Plain json + // | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + // | csv | Comma separated values with a header + // | txt | Tab separated values with a header + // | html | Simple html + // | md | Simple markdown + // | xlsx | MS Excel spreadsheet + // | sql | Returns the generated SQL rather than running the query + // | png | A PNG image of the visualization of the query + // | jpg | A JPG image of the visualization of the query + // + // + // + rpc RunLook(RunLookRequest) returns (RunLookResponse); + + + + // LookmlModel: Manage LookML Models + + // ### Get information about all lookml models. + // + rpc AllLookmlModels(AllLookmlModelsRequest) returns (AllLookmlModelsResponse); + + // ### Create a lookml model using the specified configuration. + // + rpc CreateLookmlModel(CreateLookmlModelRequest) returns (CreateLookmlModelResponse); + + // ### Get information about a lookml model. + // + rpc LookmlModel(LookmlModelRequest) returns (LookmlModelResponse); + + // ### Update a lookml model using the specified configuration. + // + rpc UpdateLookmlModel(UpdateLookmlModelRequest) returns (UpdateLookmlModelResponse); + + // ### Delete a lookml model. 
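The UpdateLook and RunLook comments above cover soft-deletion (flip the look's `deleted` property) and the supported `result_format` values. A hedged sketch of both, under the same client assumptions as the previous example:

```typescript
// Sketch only: `sdk` is an assumed authenticated Looker API client
export async function softDeleteAndRun(sdk: any, lookId: number) {
  // Soft-delete: the look remains in the database but disappears from all_looks/search_looks
  await sdk.ok(sdk.update_look(lookId, { deleted: true }))

  // Undelete is the same call with deleted: false
  await sdk.ok(sdk.update_look(lookId, { deleted: false }))

  // Run the look's query; result_format is one of the formats tabled above
  return sdk.ok(sdk.run_look({ look_id: lookId, result_format: 'csv' }))
}
```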
+ // + rpc DeleteLookmlModel(DeleteLookmlModelRequest) returns (DeleteLookmlModelResponse); + + // ### Get information about a lookml model explore. + // + rpc LookmlModelExplore(LookmlModelExploreRequest) returns (LookmlModelExploreResponse); + + + + // Metadata: Connection Metadata Features + + // ### Field name suggestions for a model and view + // + // + rpc ModelFieldnameSuggestions(ModelFieldnameSuggestionsRequest) returns (ModelFieldnameSuggestionsResponse); + + // ### List databases available to this connection + // + // Certain dialects can support multiple databases per single connection. + // If this connection supports multiple databases, the database names will be returned in an array. + // + // Connections using dialects that do not support multiple databases will return an empty array. + // + // **Note**: [Connection Features](#!/Metadata/connection_features) can be used to determine if a connection supports + // multiple databases. + // + rpc ConnectionDatabases(ConnectionDatabasesRequest) returns (ConnectionDatabasesResponse); + + // ### Retrieve metadata features for this connection + // + // Returns a list of feature names with `true` (available) or `false` (not available) + // + // + rpc ConnectionFeatures(ConnectionFeaturesRequest) returns (ConnectionFeaturesResponse); + + // ### Get the list of schemas and tables for a connection + // + // + rpc ConnectionSchemas(ConnectionSchemasRequest) returns (ConnectionSchemasResponse); + + // ### Get the list of tables for a schema + // + // For dialects that support multiple databases, optionally identify which to use. If not provided, the default + // database for the connection will be used. + // + // For dialects that do **not** support multiple databases, **do not use** the database parameter + // + rpc ConnectionTables(ConnectionTablesRequest) returns (ConnectionTablesResponse); + + // ### Get the columns (and therefore also the tables) in a specific schema + // + // + rpc ConnectionColumns(ConnectionColumnsRequest) returns (ConnectionColumnsResponse); + + // ### Search a connection for columns matching the specified name + // + // **Note**: `column_name` must be a valid column name. It is not a search pattern. + // + rpc ConnectionSearchColumns(ConnectionSearchColumnsRequest) returns (ConnectionSearchColumnsResponse); + + // ### Connection cost estimating + // + // Assign a `sql` statement to the body of the request. e.g., for Ruby, `{sql: 'select * from users'}` + // + // **Note**: If the connection's dialect has no support for cost estimates, an error will be returned + // + rpc ConnectionCostEstimate(ConnectionCostEstimateRequest) returns (ConnectionCostEstimateResponse); + + + + // Project: Manage Projects + + // ### Generate Lockfile for All LookML Dependencies + // + // Git must have been configured, must be in dev mode and deploy permission required + // + // Install_all is a two step process + // 1. For each remote_dependency in a project the dependency manager will resolve any ambiguous ref. + // 2. The project will then write out a lockfile including each remote_dependency with its resolved ref. 
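Looking back at the Metadata comments above (ConnectionDatabases through ConnectionCostEstimate): these are per-connection lookups, and the cost estimate takes a body with a `sql` property, as in the Ruby `{sql: 'select * from users'}` example. A TypeScript equivalent, with the method names assumed to follow the same snake_case convention:

```typescript
// Sketch only: `sdk` is an assumed authenticated Looker API client
export async function exploreConnection(sdk: any, connectionName: string) {
  // Dialects that support multiple databases return their names; otherwise an empty array
  const databases = await sdk.ok(sdk.connection_databases(connectionName))

  // Feature flags (true/false) for this connection
  const features = await sdk.ok(sdk.connection_features(connectionName))

  // The cost estimate call takes the SQL to estimate in the request body
  const estimate = await sdk.ok(
    sdk.connection_cost_estimate(connectionName, { sql: 'select * from users' })
  )
  return { databases, features, estimate }
}
```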
+ // + // + rpc LockAll(LockAllRequest) returns (LockAllResponse); + + // ### Get All Git Branches + // + // Returns a list of git branches in the project repository + // + rpc AllGitBranches(AllGitBranchesRequest) returns (AllGitBranchesResponse); + + // ### Get the Current Git Branch + // + // Returns the git branch currently checked out in the given project repository + // + rpc GitBranch(GitBranchRequest) returns (GitBranchResponse); + + // ### Checkout and/or reset --hard an existing Git Branch + // + // Only allowed in development mode + // - Call `update_session` to select the 'dev' workspace. + // + // Checkout an existing branch if name field is different from the name of the currently checked out branch. + // + // Optionally specify a branch name, tag name or commit SHA to which the branch should be reset. + // **DANGER** hard reset will be force pushed to the remote. Unsaved changes and commits may be permanently lost. + // + // + rpc UpdateGitBranch(UpdateGitBranchRequest) returns (UpdateGitBranchResponse); + + // ### Create and Checkout a Git Branch + // + // Creates and checks out a new branch in the given project repository + // Only allowed in development mode + // - Call `update_session` to select the 'dev' workspace. + // + // Optionally specify a branch name, tag name or commit SHA as the start point in the ref field. + // If no ref is specified, HEAD of the current branch will be used as the start point for the new branch. + // + // + rpc CreateGitBranch(CreateGitBranchRequest) returns (CreateGitBranchResponse); + + // ### Get the specified Git Branch + // + // Returns the git branch specified in branch_name path param if it exists in the given project repository + // + rpc FindGitBranch(FindGitBranchRequest) returns (FindGitBranchResponse); + + // ### Delete the specified Git Branch + // + // Delete git branch specified in branch_name path param from local and remote of specified project repository + // + rpc DeleteGitBranch(DeleteGitBranchRequest) returns (DeleteGitBranchResponse); + + // ### Deploy a Remote Branch or Ref to Production + // + // Git must have been configured and deploy permission required. + // + // Deploy is a one/two step process + // 1. If this is the first deploy of this project, create the production project with git repository. + // 2. Pull the branch or ref into the production project. + // + // Can only specify either a branch or a ref. + // + // + rpc DeployRefToProduction(DeployRefToProductionRequest) returns (DeployRefToProductionResponse); + + // ### Deploy LookML from this Development Mode Project to Production + // + // Git must have been configured, must be in dev mode and deploy permission required + // + // Deploy is a two / three step process: + // + // 1. Push commits in current branch of dev mode project to the production branch (origin/master). + // Note a. This step is skipped in read-only projects. + // Note b. If this step is unsuccessful for any reason (e.g. rejected non-fastforward because production branch has + // commits not in current branch), subsequent steps will be skipped. + // 2. If this is the first deploy of this project, create the production project with git repository. + // 3. Pull the production branch into the production project. + // + // + rpc DeployToProduction(DeployToProductionRequest) returns (DeployToProductionResponse); + + // ### Reset a project to the revision of the project that is in production. + // + // **DANGER** this will delete any changes that have not been pushed to a remote repository. 
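The Project comments above describe the development-mode git workflow: select the 'dev' workspace with `update_session`, create and check out a branch, then deploy to production. A minimal sketch under the same client assumptions (the branch name is hypothetical):

```typescript
// Sketch only: `sdk` is an assumed authenticated Looker API client
export async function branchAndDeploy(sdk: any, projectId: string) {
  // Branch operations are only allowed in development mode
  await sdk.ok(sdk.update_session({ workspace_id: 'dev' }))

  // Create and check out a new branch; with no ref, HEAD of the current branch is the start point
  await sdk.ok(sdk.create_git_branch(projectId, { name: 'my-feature-branch' })) // hypothetical name

  // ...commit LookML changes, then push the dev branch to production
  await sdk.ok(sdk.deploy_to_production(projectId))
}
```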
+ // + rpc ResetProjectToProduction(ResetProjectToProductionRequest) returns (ResetProjectToProductionResponse); + + // ### Reset a project development branch to the revision of the project that is on the remote. + // + // **DANGER** this will delete any changes that have not been pushed to a remote repository. + // + rpc ResetProjectToRemote(ResetProjectToRemoteRequest) returns (ResetProjectToRemoteResponse); + + // ### Get All Projects + // + // Returns all projects visible to the current user + // + rpc AllProjects(AllProjectsRequest) returns (AllProjectsResponse); + + // ### Create A Project + // + // dev mode required. + // - Call `update_session` to select the 'dev' workspace. + // + // `name` is required. + // `git_remote_url` is not allowed. To configure Git for the newly created project, follow the instructions in `update_project`. + // + // + rpc CreateProject(CreateProjectRequest) returns (CreateProjectResponse); + + // ### Get A Project + // + // Returns the project with the given project id + // + rpc Project(ProjectRequest) returns (ProjectResponse); + + // ### Update Project Configuration + // + // Apply changes to a project's configuration. + // + // + // #### Configuring Git for a Project + // + // To set up a Looker project with a remote git repository, follow these steps: + // + // 1. Call `update_session` to select the 'dev' workspace. + // 1. Call `create_git_deploy_key` to create a new deploy key for the project + // 1. Copy the deploy key text into the remote git repository's ssh key configuration + // 1. Call `update_project` to set project's `git_remote_url` ()and `git_service_name`, if necessary). + // + // When you modify a project's `git_remote_url`, Looker connects to the remote repository to fetch + // metadata. The remote git repository MUST be configured with the Looker-generated deploy + // key for this project prior to setting the project's `git_remote_url`. + // + // To set up a Looker project with a git repository residing on the Looker server (a 'bare' git repo): + // + // 1. Call `update_session` to select the 'dev' workspace. + // 1. Call `update_project` setting `git_remote_url` to null and `git_service_name` to "bare". + // + // + rpc UpdateProject(UpdateProjectRequest) returns (UpdateProjectResponse); + + // ### Get A Projects Manifest object + // + // Returns the project with the given project id + // + rpc Manifest(ManifestRequest) returns (ManifestResponse); + + // ### Git Deploy Key + // + // Returns the ssh public key previously created for a project's git repository. + // + rpc GitDeployKey(GitDeployKeyRequest) returns (GitDeployKeyResponse); + + // ### Create Git Deploy Key + // + // Create a public/private key pair for authenticating ssh git requests from Looker to a remote git repository + // for a particular Looker project. + // + // Returns the public key of the generated ssh key pair. + // + // Copy this public key to your remote git repository's ssh keys configuration so that the remote git service can + // validate and accept git requests from the Looker server. + // + rpc CreateGitDeployKey(CreateGitDeployKeyRequest) returns (CreateGitDeployKeyResponse); + + // ### Get Cached Project Validation Results + // + // Returns the cached results of a previous project validation calculation, if any. + // Returns http status 204 No Content if no validation results exist. + // + // Validating the content of all the files in a project can be computationally intensive + // for large projects. 
Use this API to simply fetch the results of the most recent + // project validation rather than revalidating the entire project from scratch. + // + // A value of `"stale": true` in the response indicates that the project has changed since + // the cached validation results were computed. The cached validation results may no longer + // reflect the current state of the project. + // + rpc ProjectValidationResults(ProjectValidationResultsRequest) returns (ProjectValidationResultsResponse); + + // ### Validate Project + // + // Performs lint validation of all lookml files in the project. + // Returns a list of errors found, if any. + // + // Validating the content of all the files in a project can be computationally intensive + // for large projects. For best performance, call `validate_project(project_id)` only + // when you really want to recompute project validation. To quickly display the results of + // the most recent project validation (without recomputing), use `project_validation_results(project_id)` + // + rpc ValidateProject(ValidateProjectRequest) returns (ValidateProjectResponse); + + // ### Get Project Workspace + // + // Returns information about the state of the project files in the currently selected workspace + // + rpc ProjectWorkspace(ProjectWorkspaceRequest) returns (ProjectWorkspaceResponse); + + // ### Get All Project Files + // + // Returns a list of the files in the project + // + rpc AllProjectFiles(AllProjectFilesRequest) returns (AllProjectFilesResponse); + + // ### Get Project File Info + // + // Returns information about a file in the project + // + rpc ProjectFile(ProjectFileRequest) returns (ProjectFileResponse); + + // ### Get All Git Connection Tests + // + // dev mode required. + // - Call `update_session` to select the 'dev' workspace. + // + // Returns a list of tests which can be run against a project's (or the dependency project for the provided remote_url) git connection. Call [Run Git Connection Test](#!/Project/run_git_connection_test) to execute each test in sequence. + // + // Tests are ordered by increasing specificity. Tests should be run in the order returned because later tests require functionality tested by tests earlier in the test list. + // + // For example, a late-stage test for write access is meaningless if connecting to the git server (an early test) is failing. + // + rpc AllGitConnectionTests(AllGitConnectionTestsRequest) returns (AllGitConnectionTestsResponse); + + // ### Run a git connection test + // + // Run the named test on the git service used by this project (or the dependency project for the provided remote_url) and return the result. This + // is intended to help debug git connections when things do not work properly, to give + // more helpful information about why a git url is not working with Looker. + // + // Tests should be run in the order they are returned by [Get All Git Connection Tests](#!/Project/all_git_connection_tests). + // + rpc RunGitConnectionTest(RunGitConnectionTestRequest) returns (RunGitConnectionTestResponse); + + // ### Get All LookML Tests + // + // Returns a list of tests which can be run to validate a project's LookML code and/or the underlying data, + // optionally filtered by the file id. + // Call [Run LookML Test](#!/Project/run_lookml_test) to execute tests. + // + rpc AllLookmlTests(AllLookmlTestsRequest) returns (AllLookmlTestsResponse); + + // ### Run LookML Tests + // + // Runs all tests in the project, optionally filtered by file, test, and/or model. 
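The UpdateProject comment above lists the git configuration steps (dev workspace, `create_git_deploy_key`, register the key with the remote, then `update_project` with the `git_remote_url`), and the git connection tests are meant to run in the order returned. Combining both, as a hedged sketch under the same client assumptions:

```typescript
// Sketch only: `sdk` is an assumed authenticated Looker API client
export async function configureAndVerifyGit(
  sdk: any,
  projectId: string,
  remoteUrl: string
) {
  await sdk.ok(sdk.update_session({ workspace_id: 'dev' }))

  // The generated public key must be added to the remote repository before git_remote_url is set
  const deployKey = await sdk.ok(sdk.create_git_deploy_key(projectId))
  console.log('Register this deploy key with the remote repository:', deployKey)

  await sdk.ok(sdk.update_project(projectId, { git_remote_url: remoteUrl }))

  // Run the connection tests in the order returned; later tests depend on earlier ones passing
  const tests = await sdk.ok(sdk.all_git_connection_tests(projectId))
  for (const test of tests) {
    const result = await sdk.ok(sdk.run_git_connection_test(projectId, test.id))
    console.log(test.id, result.status)
  }
}
```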
+ // + rpc RunLookmlTest(RunLookmlTestRequest) returns (RunLookmlTestResponse); + + // ### Configure Repository Credential for a remote dependency + // + // Admin required. + // + // `root_project_id` is required. + // `credential_id` is required. + // + // + rpc UpdateRepositoryCredential(UpdateRepositoryCredentialRequest) returns (UpdateRepositoryCredentialResponse); + + // ### Repository Credential for a remote dependency + // + // Admin required. + // + // `root_project_id` is required. + // `credential_id` is required. + // + rpc DeleteRepositoryCredential(DeleteRepositoryCredentialRequest) returns (DeleteRepositoryCredentialResponse); + + // ### Get all Repository Credentials for a project + // + // `root_project_id` is required. + // + rpc GetAllRepositoryCredentials(GetAllRepositoryCredentialsRequest) returns (GetAllRepositoryCredentialsResponse); + + + + // Query: Run and Manage Queries + + // ### Create an async query task + // + // Creates a query task (job) to run a previously created query asynchronously. Returns a Query Task ID. + // + // Use [query_task(query_task_id)](#!/Query/query_task) to check the execution status of the query task. + // After the query task status reaches "Complete", use [query_task_results(query_task_id)](#!/Query/query_task_results) to fetch the results of the query. + // + rpc CreateQueryTask(CreateQueryTaskRequest) returns (CreateQueryTaskResponse); + + // ### Fetch results of multiple async queries + // + // Returns the results of multiple async queries in one request. + // + // For Query Tasks that are not completed, the response will include the execution status of the Query Task but will not include query results. + // Query Tasks whose results have expired will have a status of 'expired'. + // If the user making the API request does not have sufficient privileges to view a Query Task result, the result will have a status of 'missing' + // + rpc QueryTaskMultiResults(QueryTaskMultiResultsRequest) returns (QueryTaskMultiResultsResponse); + + // ### Get Query Task details + // + // Use this function to check the status of an async query task. After the status + // reaches "Complete", you can call [query_task_results(query_task_id)](#!/Query/query_task_results) to + // retrieve the results of the query. + // + // Use [create_query_task()](#!/Query/create_query_task) to create an async query task. + // + rpc QueryTask(QueryTaskRequest) returns (QueryTaskResponse); + + // ### Get Async Query Results + // + // Returns the results of an async query task if the query has completed. + // + // If the query task is still running or waiting to run, this function returns 204 No Content. + // + // If the query task ID is invalid or the cached results of the query task have expired, this function returns 404 Not Found. + // + // Use [query_task(query_task_id)](#!/Query/query_task) to check the execution status of the query task + // Call query_task_results only after the query task status reaches "Complete". + // + // You can also use [query_task_multi_results()](#!/Query/query_task_multi_results) retrieve the + // results of multiple async query tasks at the same time. + // + // #### SQL Error Handling: + // If the query fails due to a SQL db error, how this is communicated depends on the result_format you requested in `create_query_task()`. + // + // For `json_detail` result_format: `query_task_results()` will respond with HTTP status '200 OK' and db SQL error info + // will be in the `errors` property of the response object. 
The 'data' property will be empty. + // + // For all other result formats: `query_task_results()` will respond with HTTP status `400 Bad Request` and some db SQL error info + // will be in the message of the 400 error response, but not as detailed as expressed in `json_detail.errors`. + // These data formats can only carry row data, and error info is not row data. + // + rpc QueryTaskResults(QueryTaskResultsRequest) returns (QueryTaskResultsResponse); + + // ### Get a previously created query by id. + // + // A Looker query object includes the various parameters that define a database query that has been run or + // could be run in the future. These parameters include: model, view, fields, filters, pivots, etc. + // Query *results* are not part of the query object. + // + // Query objects are unique and immutable. Query objects are created automatically in Looker as users explore data. + // Looker does not delete them; they become part of the query history. When asked to create a query for + // any given set of parameters, Looker will first try to find an existing query object with matching + // parameters and will only create a new object when an appropriate object can not be found. + // + // This 'get' method is used to get the details about a query for a given id. See the other methods here + // to 'create' and 'run' queries. + // + // Note that some fields like 'filter_config' and 'vis_config' etc are specific to how the Looker UI + // builds queries and visualizations and are not generally useful for API use. They are not required when + // creating new queries and can usually just be ignored. + // + // + rpc Query(QueryRequest) returns (QueryResponse); + + // ### Get the query for a given query slug. + // + // This returns the query for the 'slug' in a query share URL. + // + // The 'slug' is a randomly chosen short string that is used as an alternative to the query's id value + // for use in URLs etc. This method exists as a convenience to help you use the API to 'find' queries that + // have been created using the Looker UI. + // + // You can use the Looker explore page to build a query and then choose the 'Share' option to + // show the share url for the query. Share urls generally look something like 'https://looker.yourcompany/x/vwGSbfc'. + // The trailing 'vwGSbfc' is the share slug. You can pass that string to this api method to get details about the query. + // Those details include the 'id' that you can use to run the query. Or, you can copy the query body + // (perhaps with your own modification) and use that as the basis to make/run new queries. + // + // This will also work with slugs from Looker explore urls like + // 'https://looker.yourcompany/explore/ecommerce/orders?qid=aogBgL6o3cKK1jN3RoZl5s'. In this case + // 'aogBgL6o3cKK1jN3RoZl5s' is the slug. + // + rpc QueryForSlug(QueryForSlugRequest) returns (QueryForSlugResponse); + + // ### Create a query. + // + // This allows you to create a new query that you can later run. Looker queries are immutable once created + // and are not deleted. If you create a query that is exactly like an existing query then the existing query + // will be returned and no new query will be created. Whether a new query is created or not, you can use + // the 'id' in the returned query with the 'run' method. + // + // The query parameters are passed as json in the body of the request. + // + // + rpc CreateQuery(CreateQueryRequest) returns (CreateQueryResponse); + + // ### Run a saved query. + // + // This runs a previously saved query. 
You can use this on a query that was generated in the Looker UI + // or one that you have explicitly created using the API. You can also use a query 'id' from a saved 'Look'. + // + // The 'result_format' parameter specifies the desired structure and format of the response. + // + // Supported formats: + // + // | result_format | Description + // | :-----------: | :--- | + // | json | Plain json + // | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + // | csv | Comma separated values with a header + // | txt | Tab separated values with a header + // | html | Simple html + // | md | Simple markdown + // | xlsx | MS Excel spreadsheet + // | sql | Returns the generated SQL rather than running the query + // | png | A PNG image of the visualization of the query + // | jpg | A JPG image of the visualization of the query + // + // + // + rpc RunQuery(RunQueryRequest) returns (RunQueryResponse); + + // ### Run the query that is specified inline in the posted body. + // + // This allows running a query as defined in json in the posted body. This combines + // the two actions of posting & running a query into one step. + // + // Here is an example body in json: + // ``` + // { + // "model":"thelook", + // "view":"inventory_items", + // "fields":["category.name","inventory_items.days_in_inventory_tier","products.count"], + // "filters":{"category.name":"socks"}, + // "sorts":["products.count desc 0"], + // "limit":"500", + // "query_timezone":"America/Los_Angeles" + // } + // ``` + // + // When using the Ruby SDK this would be passed as a Ruby hash like: + // ``` + // { + // :model=>"thelook", + // :view=>"inventory_items", + // :fields=> + // ["category.name", + // "inventory_items.days_in_inventory_tier", + // "products.count"], + // :filters=>{:"category.name"=>"socks"}, + // :sorts=>["products.count desc 0"], + // :limit=>"500", + // :query_timezone=>"America/Los_Angeles", + // } + // ``` + // + // This will return the result of running the query in the format specified by the 'result_format' parameter. + // + // Supported formats: + // + // | result_format | Description + // | :-----------: | :--- | + // | json | Plain json + // | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + // | csv | Comma separated values with a header + // | txt | Tab separated values with a header + // | html | Simple html + // | md | Simple markdown + // | xlsx | MS Excel spreadsheet + // | sql | Returns the generated SQL rather than running the query + // | png | A PNG image of the visualization of the query + // | jpg | A JPG image of the visualization of the query + // + // + // + rpc RunInlineQuery(RunInlineQueryRequest) returns (RunInlineQueryResponse); + + // ### Run an URL encoded query. + // + // This requires the caller to encode the specifiers for the query into the URL query part using + // Looker-specific syntax as explained below. + // + // Generally, you would want to use one of the methods that takes the parameters as json in the POST body + // for creating and/or running queries. This method exists for cases where one really needs to encode the + // parameters into the URL of a single 'GET' request. This matches the way that the Looker UI formats + // 'explore' URLs etc. + // + // The parameters here are very similar to the json body formatting except that the filter syntax is + // tricky. 
Unfortunately, this format makes this method not currently callable via the 'Try it out!' button + // in this documentation page. But, this is callable when creating URLs manually or when using the Looker SDK. + // + // Here is an example inline query URL: + // + // ``` + // https://looker.mycompany.com:19999/api/3.0/queries/models/thelook/views/inventory_items/run/json?fields=category.name,inventory_items.days_in_inventory_tier,products.count&f[category.name]=socks&sorts=products.count+desc+0&limit=500&query_timezone=America/Los_Angeles + // ``` + // + // When invoking this endpoint with the Ruby SDK, pass the query parameter parts as a hash. The hash to match the above would look like: + // + // ```ruby + // query_params = + // { + // :fields => "category.name,inventory_items.days_in_inventory_tier,products.count", + // :"f[category.name]" => "socks", + // :sorts => "products.count desc 0", + // :limit => "500", + // :query_timezone => "America/Los_Angeles" + // } + // response = ruby_sdk.run_url_encoded_query('thelook','inventory_items','json', query_params) + // + // ``` + // + // Again, it is generally easier to use the variant of this method that passes the full query in the POST body. + // This method is available for cases where other alternatives won't fit the need. + // + // Supported formats: + // + // | result_format | Description + // | :-----------: | :--- | + // | json | Plain json + // | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + // | csv | Comma separated values with a header + // | txt | Tab separated values with a header + // | html | Simple html + // | md | Simple markdown + // | xlsx | MS Excel spreadsheet + // | sql | Returns the generated SQL rather than running the query + // | png | A PNG image of the visualization of the query + // | jpg | A JPG image of the visualization of the query + // + // + // + rpc RunUrlEncodedQuery(RunUrlEncodedQueryRequest) returns (RunUrlEncodedQueryResponse); + + // ### Get Merge Query + // + // Returns a merge query object given its id. + // + rpc MergeQuery(MergeQueryRequest) returns (MergeQueryResponse); + + // ### Create Merge Query + // + // Creates a new merge query object. + // + // A merge query takes the results of one or more queries and combines (merges) the results + // according to field mapping definitions. The result is similar to a SQL left outer join. + // + // A merge query can merge results of queries from different SQL databases. + // + // The order that queries are defined in the source_queries array property is significant. The + // first query in the array defines the primary key into which the results of subsequent + // queries will be merged. + // + // Like model/view query objects, merge queries are immutable and have structural identity - if + // you make a request to create a new merge query that is identical to an existing merge query, + // the existing merge query will be returned instead of creating a duplicate. Conversely, any + // change to the contents of a merge query will produce a new object with a new id. + // + rpc CreateMergeQuery(CreateMergeQueryRequest) returns (CreateMergeQueryResponse); + + // Get information about all running queries. + // + rpc AllRunningQueries(AllRunningQueriesRequest) returns (AllRunningQueriesResponse); + + // Kill a query with a specific query_task_id. + // + rpc KillQuery(KillQueryRequest) returns (KillQueryResponse); + + // Get a SQL Runner query. 
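The RunInlineQuery comment above shows the query body as JSON and as a Ruby hash. The same call from TypeScript, assuming a client `sdk` with a snake_case `run_inline_query` method and an `ok` helper that unwraps the response:

```typescript
// Sketch only: `sdk` is an assumed authenticated Looker API client; the body below
// is the same example query shown in the run-inline-query comment above
export async function runInline(sdk: any) {
  return sdk.ok(
    sdk.run_inline_query({
      result_format: 'json',
      body: {
        model: 'thelook',
        view: 'inventory_items',
        fields: [
          'category.name',
          'inventory_items.days_in_inventory_tier',
          'products.count',
        ],
        filters: { 'category.name': 'socks' },
        sorts: ['products.count desc 0'],
        limit: '500',
        query_timezone: 'America/Los_Angeles',
      },
    })
  )
}
```

Because query objects are immutable and reused, the separate create-then-run pair described above is the better fit when the same query will be run repeatedly; the inline form simply collapses the two steps into one request.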
+ rpc SqlQuery(SqlQueryRequest) returns (SqlQueryResponse); + + // ### Create a SQL Runner Query + // + // Either the `connection_name` or `model_name` parameter MUST be provided. + // + rpc CreateSqlQuery(CreateSqlQueryRequest) returns (CreateSqlQueryResponse); + + // Execute a SQL Runner query in a given result_format. + rpc RunSqlQuery(RunSqlQueryRequest) returns (RunSqlQueryResponse); + + + + // RenderTask: Manage Render Tasks + + // ### Create a new task to render a look to an image. + // + // Returns a render task object. + // To check the status of a render task, pass the render_task.id to [Get Render Task](#!/RenderTask/get_render_task). + // Once the render task is complete, you can download the resulting document or image using [Get Render Task Results](#!/RenderTask/get_render_task_results). + // + // + rpc CreateLookRenderTask(CreateLookRenderTaskRequest) returns (CreateLookRenderTaskResponse); + + // ### Create a new task to render an existing query to an image. + // + // Returns a render task object. + // To check the status of a render task, pass the render_task.id to [Get Render Task](#!/RenderTask/get_render_task). + // Once the render task is complete, you can download the resulting document or image using [Get Render Task Results](#!/RenderTask/get_render_task_results). + // + // + rpc CreateQueryRenderTask(CreateQueryRenderTaskRequest) returns (CreateQueryRenderTaskResponse); + + // ### Create a new task to render a dashboard to a document or image. + // + // Returns a render task object. + // To check the status of a render task, pass the render_task.id to [Get Render Task](#!/RenderTask/get_render_task). + // Once the render task is complete, you can download the resulting document or image using [Get Render Task Results](#!/RenderTask/get_render_task_results). + // + // + rpc CreateDashboardRenderTask(CreateDashboardRenderTaskRequest) returns (CreateDashboardRenderTaskResponse); + + // ### Get information about a render task. + // + // Returns a render task object. + // To check the status of a render task, pass the render_task.id to [Get Render Task](#!/RenderTask/get_render_task). + // Once the render task is complete, you can download the resulting document or image using [Get Render Task Results](#!/RenderTask/get_render_task_results). + // + // + rpc RenderTask(RenderTaskRequest) returns (RenderTaskResponse); + + // ### Get the document or image produced by a completed render task. + // + // Note that the PDF or image result will be a binary blob in the HTTP response, as indicated by the + // Content-Type in the response headers. This may require specialized (or at least different) handling than text + // responses such as JSON. You may need to tell your HTTP client that the response is binary so that it does not + // attempt to parse the binary data as text. + // + // If the render task exists but has not finished rendering the results, the response HTTP status will be + // **202 Accepted**, the response body will be empty, and the response will have a Retry-After header indicating + // that the caller should repeat the request at a later time. + // + // Returns 404 if the render task cannot be found, if the cached result has expired, or if the caller + // does not have permission to view the results. + // + // For detailed information about the status of the render task, use [Render Task](#!/RenderTask/render_task). 
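The render task comments above (and the polling note that continues just below) describe a create, poll, download flow, with 202/Retry-After returned while rendering is still in progress. A sketch of that loop under the same client assumptions; the status strings checked here are assumptions:

```typescript
// Sketch only: `sdk` is an assumed authenticated Looker API client; the status
// values ('success', 'failure') are assumptions
export async function renderLookToPng(sdk: any, lookId: number) {
  // Start the render task (format, width and height are illustrative values)
  const task = await sdk.ok(sdk.create_look_render_task(lookId, 'png', 800, 600))

  // Poll the task itself, not the results endpoint, until it reaches completion or error
  let status = task.status
  while (status !== 'success' && status !== 'failure') {
    await new Promise((resolve) => setTimeout(resolve, 2000))
    status = (await sdk.ok(sdk.render_task(task.id))).status
  }

  // Download the binary document or image once rendering is complete
  return sdk.ok(sdk.render_task_results(task.id))
}
```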
+ // Polling loops waiting for completion of a render task would be better served by polling **render_task(id)** until + // the task status reaches completion (or error) instead of polling **render_task_results(id)** alone. + // + rpc RenderTaskResults(RenderTaskResultsRequest) returns (RenderTaskResultsResponse); + + + + // Role: Manage Roles + + // ### Search model sets + // Returns all model set records that match the given search criteria. + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + rpc SearchModelSets(SearchModelSetsRequest) returns (SearchModelSetsResponse); + + // ### Get information about the model set with a specific id. + // + rpc ModelSet(ModelSetRequest) returns (ModelSetResponse); + + // ### Update information about the model set with a specific id. + // + rpc UpdateModelSet(UpdateModelSetRequest) returns (UpdateModelSetResponse); + + // ### Delete the model set with a specific id. + // + rpc DeleteModelSet(DeleteModelSetRequest) returns (DeleteModelSetResponse); + + // ### Get information about all model sets. + // + rpc AllModelSets(AllModelSetsRequest) returns (AllModelSetsResponse); + + // ### Create a model set with the specified information. Model sets are used by Roles. + // + rpc CreateModelSet(CreateModelSetRequest) returns (CreateModelSetResponse); + + // ### Get all supported permissions. + // + rpc AllPermissions(AllPermissionsRequest) returns (AllPermissionsResponse); + + // ### Search permission sets + // Returns all permission set records that match the given search criteria. + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. 
+ // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + rpc SearchPermissionSets(SearchPermissionSetsRequest) returns (SearchPermissionSetsResponse); + + // ### Get information about the permission set with a specific id. + // + rpc PermissionSet(PermissionSetRequest) returns (PermissionSetResponse); + + // ### Update information about the permission set with a specific id. + // + rpc UpdatePermissionSet(UpdatePermissionSetRequest) returns (UpdatePermissionSetResponse); + + // ### Delete the permission set with a specific id. + // + rpc DeletePermissionSet(DeletePermissionSetRequest) returns (DeletePermissionSetResponse); + + // ### Get information about all permission sets. + // + rpc AllPermissionSets(AllPermissionSetsRequest) returns (AllPermissionSetsResponse); + + // ### Create a permission set with the specified information. Permission sets are used by Roles. + // + rpc CreatePermissionSet(CreatePermissionSetRequest) returns (CreatePermissionSetResponse); + + // ### Get information about all roles. + // + rpc AllRoles(AllRolesRequest) returns (AllRolesResponse); + + // ### Create a role with the specified information. + // + rpc CreateRole(CreateRoleRequest) returns (CreateRoleResponse); + + // ### Search roles + // + // Returns all role records that match the given search criteria. + // + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + rpc SearchRoles(SearchRolesRequest) returns (SearchRolesResponse); + + // ### Get information about the role with a specific id. + // + rpc Role(RoleRequest) returns (RoleResponse); + + // ### Update information about the role with a specific id. + // + rpc UpdateRole(UpdateRoleRequest) returns (UpdateRoleResponse); + + // ### Delete the role with a specific id. + // + rpc DeleteRole(DeleteRoleRequest) returns (DeleteRoleResponse); + + // ### Get information about all the groups with the role that has a specific id. + // + rpc RoleGroups(RoleGroupsRequest) returns (RoleGroupsResponse); + + // ### Set all groups for a role, removing all existing group associations from that role. + // + rpc SetRoleGroups(SetRoleGroupsRequest) returns (SetRoleGroupsResponse); + + // ### Get information about all the users with the role that has a specific id. 
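The search endpoints in this section (SearchModelSets, SearchPermissionSets, SearchRoles) share the matching rules spelled out above: params are ANDed by default, ORed when `filter_or` is true, string params take SQL LIKE wildcards, and "IS NULL"/"NOT NULL" are accepted as special expressions. For example, under the same client assumptions:

```typescript
// Sketch only: `sdk` is an assumed authenticated Looker API client
export async function findRoles(sdk: any) {
  // Matches role names beginning with "dev" (case-insensitive), e.g. "Developer"
  const devRoles = await sdk.ok(sdk.search_roles({ name: 'dev%' }))

  // With filter_or: true, roles matching either criterion are returned
  const eitherMatch = await sdk.ok(
    sdk.search_roles({ name: 'dev%', built_in: true, filter_or: true })
  )
  return { devRoles, eitherMatch }
}
```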
+ // + rpc RoleUsers(RoleUsersRequest) returns (RoleUsersResponse); + + // ### Set all the users of the role with a specific id. + // + rpc SetRoleUsers(SetRoleUsersRequest) returns (SetRoleUsersResponse); + + + + // ScheduledPlan: Manage Scheduled Plans + + // ### Get Scheduled Plans for a Space + // + // Returns scheduled plans owned by the caller for a given space id. + // + rpc ScheduledPlansForSpace(ScheduledPlansForSpaceRequest) returns (ScheduledPlansForSpaceResponse); + + // ### Get Information About a Scheduled Plan + // + // Admins can fetch information about other users' Scheduled Plans. + // + rpc ScheduledPlan(ScheduledPlanRequest) returns (ScheduledPlanResponse); + + // ### Update a Scheduled Plan + // + // Admins can update other users' Scheduled Plans. + // + // Note: Any scheduled plan destinations specified in an update will **replace** all scheduled plan destinations + // currently defined for the scheduled plan. + // + // For Example: If a scheduled plan has destinations A, B, and C, and you call update on this scheduled plan + // specifying only B in the destinations, then destinations A and C will be deleted by the update. + // + // Updating a scheduled plan to assign null or an empty array to the scheduled_plan_destinations property is an error, as a scheduled plan must always have at least one destination. + // + // If you omit the scheduled_plan_destinations property from the object passed to update, then the destinations + // defined on the original scheduled plan will remain unchanged. + // + // #### Email Permissions: + // + // For details about permissions required to schedule delivery to email and the safeguards + // Looker offers to protect against sending to unauthorized email destinations, see [Email Domain Whitelist for Scheduled Looks](https://docs.looker.com/r/api/embed-permissions). + // + // + // #### Scheduled Plan Destination Formats + // + // Scheduled plan destinations must specify the data format to produce and send to the destination. + // + // Formats: + // + // | format | Description + // | :-----------: | :--- | + // | json | A JSON object containing a `data` property which contains an array of JSON objects, one per row. No metadata. + // | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + // | inline_json | Same as the JSON format, except that the `data` property is a string containing JSON-escaped row data. Additional properties describe the data operation. This format is primarily used to send data to web hooks so that the web hook doesn't have to re-encode the JSON row data in order to pass it on to its ultimate destination. + // | csv | Comma separated values with a header + // | txt | Tab separated values with a header + // | html | Simple html + // | xlsx | MS Excel spreadsheet + // | wysiwyg_pdf | Dashboard rendered in a tiled layout to produce a PDF document + // | assembled_pdf | Dashboard rendered in a single column layout to produce a PDF document + // | wysiwyg_png | Dashboard rendered in a tiled layout to produce a PNG image + // || + // + // Valid formats vary by destination type and source object. `wysiwyg_pdf` is only valid for dashboards, for example. + // + // + // + rpc UpdateScheduledPlan(UpdateScheduledPlanRequest) returns (UpdateScheduledPlanResponse); + + // ### Delete a Scheduled Plan + // + // Normal users can only delete their own scheduled plans. + // Admins can delete other users' scheduled plans. + // This delete cannot be undone. 
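The UpdateScheduledPlan comment above has an easy-to-miss rule: any destinations passed in an update replace all existing destinations, while omitting the property leaves them untouched. A small sketch of a destination-preserving update, under the same client assumptions:

```typescript
// Sketch only: `sdk` is an assumed authenticated Looker API client
export async function renameSchedule(sdk: any, planId: number) {
  // Omitting scheduled_plan_destination leaves the existing destinations unchanged;
  // passing an array here would replace every destination currently on the plan
  return sdk.ok(sdk.update_scheduled_plan(planId, { name: 'Renamed schedule' }))
}
```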
+ // + rpc DeleteScheduledPlan(DeleteScheduledPlanRequest) returns (DeleteScheduledPlanResponse); + + // ### List All Scheduled Plans + // + // Returns all scheduled plans which belong to the caller or given user. + // + // If no user_id is provided, this function returns the scheduled plans owned by the caller. + // + // + // To list all schedules for all users, pass `all_users=true`. + // + // + // The caller must have `see_schedules` permission to see other users' scheduled plans. + // + // + // + rpc AllScheduledPlans(AllScheduledPlansRequest) returns (AllScheduledPlansResponse); + + // ### Create a Scheduled Plan + // + // Create a scheduled plan to render a Look or Dashboard on a recurring schedule. + // + // To create a scheduled plan, you MUST provide values for the following fields: + // `name` + // and + // `look_id`, `dashboard_id`, `lookml_dashboard_id`, or `query_id` + // and + // `cron_tab` or `datagroup` + // and + // at least one scheduled_plan_destination + // + // A scheduled plan MUST have at least one scheduled_plan_destination defined. + // + // When `look_id` is set, `require_no_results`, `require_results`, and `require_change` are all required. + // + // If `create_scheduled_plan` fails with a 422 error, be sure to look at the error messages in the response which will explain exactly what fields are missing or values that are incompatible. + // + // The queries that provide the data for the look or dashboard are run in the context of user account that owns the scheduled plan. + // + // When `run_as_recipient` is `false` or not specified, the queries that provide the data for the + // look or dashboard are run in the context of user account that owns the scheduled plan. + // + // When `run_as_recipient` is `true` and all the email recipients are Looker user accounts, the + // queries are run in the context of each recipient, so different recipients may see different + // data from the same scheduled render of a look or dashboard. For more details, see [Run As Recipient](https://looker.com/docs/r/admin/run-as-recipient). + // + // Admins can create and modify scheduled plans on behalf of other users by specifying a user id. + // Non-admin users may not create or modify scheduled plans by or for other users. + // + // #### Email Permissions: + // + // For details about permissions required to schedule delivery to email and the safeguards + // Looker offers to protect against sending to unauthorized email destinations, see [Email Domain Whitelist for Scheduled Looks](https://docs.looker.com/r/api/embed-permissions). + // + // + // #### Scheduled Plan Destination Formats + // + // Scheduled plan destinations must specify the data format to produce and send to the destination. + // + // Formats: + // + // | format | Description + // | :-----------: | :--- | + // | json | A JSON object containing a `data` property which contains an array of JSON objects, one per row. No metadata. + // | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + // | inline_json | Same as the JSON format, except that the `data` property is a string containing JSON-escaped row data. Additional properties describe the data operation. This format is primarily used to send data to web hooks so that the web hook doesn't have to re-encode the JSON row data in order to pass it on to its ultimate destination. 
+ // | csv | Comma separated values with a header + // | txt | Tab separated values with a header + // | html | Simple html + // | xlsx | MS Excel spreadsheet + // | wysiwyg_pdf | Dashboard rendered in a tiled layout to produce a PDF document + // | assembled_pdf | Dashboard rendered in a single column layout to produce a PDF document + // | wysiwyg_png | Dashboard rendered in a tiled layout to produce a PNG image + // || + // + // Valid formats vary by destination type and source object. `wysiwyg_pdf` is only valid for dashboards, for example. + // + // + // + rpc CreateScheduledPlan(CreateScheduledPlanRequest) returns (CreateScheduledPlanResponse); + + // ### Run a Scheduled Plan Immediately + // + // Create a scheduled plan that runs only once, and immediately. + // + // This can be useful for testing a Scheduled Plan before committing to a production schedule. + // + // Admins can create scheduled plans on behalf of other users by specifying a user id. + // + // This API is rate limited to prevent it from being used for relay spam or DoS attacks + // + // #### Email Permissions: + // + // For details about permissions required to schedule delivery to email and the safeguards + // Looker offers to protect against sending to unauthorized email destinations, see [Email Domain Whitelist for Scheduled Looks](https://docs.looker.com/r/api/embed-permissions). + // + // + // #### Scheduled Plan Destination Formats + // + // Scheduled plan destinations must specify the data format to produce and send to the destination. + // + // Formats: + // + // | format | Description + // | :-----------: | :--- | + // | json | A JSON object containing a `data` property which contains an array of JSON objects, one per row. No metadata. + // | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + // | inline_json | Same as the JSON format, except that the `data` property is a string containing JSON-escaped row data. Additional properties describe the data operation. This format is primarily used to send data to web hooks so that the web hook doesn't have to re-encode the JSON row data in order to pass it on to its ultimate destination. + // | csv | Comma separated values with a header + // | txt | Tab separated values with a header + // | html | Simple html + // | xlsx | MS Excel spreadsheet + // | wysiwyg_pdf | Dashboard rendered in a tiled layout to produce a PDF document + // | assembled_pdf | Dashboard rendered in a single column layout to produce a PDF document + // | wysiwyg_png | Dashboard rendered in a tiled layout to produce a PNG image + // || + // + // Valid formats vary by destination type and source object. `wysiwyg_pdf` is only valid for dashboards, for example. + // + // + // + rpc ScheduledPlanRunOnce(ScheduledPlanRunOnceRequest) returns (ScheduledPlanRunOnceResponse); + + // ### Get Scheduled Plans for a Look + // + // Returns all scheduled plans for a look which belong to the caller or given user. + // + // If no user_id is provided, this function returns the scheduled plans owned by the caller. + // + // + // To list all schedules for all users, pass `all_users=true`. + // + // + // The caller must have `see_schedules` permission to see other users' scheduled plans. + // + // + // + rpc ScheduledPlansForLook(ScheduledPlansForLookRequest) returns (ScheduledPlansForLookResponse); + + // ### Get Scheduled Plans for a Dashboard + // + // Returns all scheduled plans for a dashboard which belong to the caller or given user. 
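The CreateScheduledPlan comment above lists the minimum required fields: a `name`, one of `look_id`/`dashboard_id`/`lookml_dashboard_id`/`query_id`, a cron schedule or datagroup, the `require_*` flags when `look_id` is set, and at least one destination. A minimal request under the same client assumptions (the address and schedule are illustrative):

```typescript
// Sketch only: `sdk` is an assumed authenticated Looker API client; exact field
// names (e.g. crontab, scheduled_plan_destination) are assumptions
export async function scheduleLook(sdk: any, lookId: number) {
  return sdk.ok(
    sdk.create_scheduled_plan({
      name: 'Weekly inventory report', // required
      look_id: lookId, // one of look_id, dashboard_id, lookml_dashboard_id or query_id
      crontab: '0 8 * * 1', // a cron schedule or a datagroup is required
      require_no_results: false, // the three require_* flags are required when look_id is set
      require_results: true,
      require_change: false,
      scheduled_plan_destination: [
        // at least one destination is required
        { type: 'email', address: 'team@example.com', format: 'csv' },
      ],
    })
  )
}
```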
+ // + // If no user_id is provided, this function returns the scheduled plans owned by the caller. + // + // + // To list all schedules for all users, pass `all_users=true`. + // + // + // The caller must have `see_schedules` permission to see other users' scheduled plans. + // + // + // + rpc ScheduledPlansForDashboard(ScheduledPlansForDashboardRequest) returns (ScheduledPlansForDashboardResponse); + + // ### Get Scheduled Plans for a LookML Dashboard + // + // Returns all scheduled plans for a LookML Dashboard which belong to the caller or given user. + // + // If no user_id is provided, this function returns the scheduled plans owned by the caller. + // + // + // To list all schedules for all users, pass `all_users=true`. + // + // + // The caller must have `see_schedules` permission to see other users' scheduled plans. + // + // + // + rpc ScheduledPlansForLookmlDashboard(ScheduledPlansForLookmlDashboardRequest) returns (ScheduledPlansForLookmlDashboardResponse); + + // ### Run a Scheduled Plan By Id Immediately + // This function creates a run-once schedule plan based on an existing scheduled plan, + // applies modifications (if any) to the new scheduled plan, and runs the new schedule plan immediately. + // This can be useful for testing modifications to an existing scheduled plan before committing to a production schedule. + // + // This function internally performs the following operations: + // + // 1. Copies the properties of the existing scheduled plan into a new scheduled plan + // 2. Copies any properties passed in the JSON body of this request into the new scheduled plan (replacing the original values) + // 3. Creates the new scheduled plan + // 4. Runs the new scheduled plan + // + // The original scheduled plan is not modified by this operation. + // Admins can create, modify, and run scheduled plans on behalf of other users by specifying a user id. + // Non-admins can only create, modify, and run their own scheduled plans. + // + // #### Email Permissions: + // + // For details about permissions required to schedule delivery to email and the safeguards + // Looker offers to protect against sending to unauthorized email destinations, see [Email Domain Whitelist for Scheduled Looks](https://docs.looker.com/r/api/embed-permissions). + // + // + // #### Scheduled Plan Destination Formats + // + // Scheduled plan destinations must specify the data format to produce and send to the destination. + // + // Formats: + // + // | format | Description + // | :-----------: | :--- | + // | json | A JSON object containing a `data` property which contains an array of JSON objects, one per row. No metadata. + // | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + // | inline_json | Same as the JSON format, except that the `data` property is a string containing JSON-escaped row data. Additional properties describe the data operation. This format is primarily used to send data to web hooks so that the web hook doesn't have to re-encode the JSON row data in order to pass it on to its ultimate destination. 
+ // | csv | Comma separated values with a header + // | txt | Tab separated values with a header + // | html | Simple html + // | xlsx | MS Excel spreadsheet + // | wysiwyg_pdf | Dashboard rendered in a tiled layout to produce a PDF document + // | assembled_pdf | Dashboard rendered in a single column layout to produce a PDF document + // | wysiwyg_png | Dashboard rendered in a tiled layout to produce a PNG image + // || + // + // Valid formats vary by destination type and source object. `wysiwyg_pdf` is only valid for dashboards, for example. + // + // + // + // This API is rate limited to prevent it from being used for relay spam or DoS attacks + // + // + rpc ScheduledPlanRunOnceById(ScheduledPlanRunOnceByIdRequest) returns (ScheduledPlanRunOnceByIdResponse); + + + + // Session: Session Information + + // ### Get API Session + // + // Returns information about the current API session, such as which workspace is selected for the session. + // + rpc Session(SessionRequest) returns (SessionResponse); + + // ### Update API Session + // + // #### API Session Workspace + // + // You can use this endpoint to change the active workspace for the current API session. + // + // Only one workspace can be active in a session. The active workspace can be changed + // any number of times in a session. + // + // The default workspace for API sessions is the "production" workspace. + // + // All Looker APIs that use projects or lookml models (such as running queries) will + // use the version of project and model files defined by this workspace for the lifetime of the + // current API session or until the session workspace is changed again. + // + // An API session has the same lifetime as the access_token used to authenticate API requests. Each successful + // API login generates a new access_token and a new API session. + // + // If your Looker API client application needs to work in a dev workspace across multiple + // API sessions, be sure to select the dev workspace after each login. + // + rpc UpdateSession(UpdateSessionRequest) returns (UpdateSessionResponse); + + + + // Theme: Manage Themes + + // ### Get an array of all existing themes + // + // Get a **single theme** by id with [Theme](#!/Theme/theme) + // + // This method returns an array of all existing themes. The active time for the theme is not considered. + // + // **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + // + // + rpc AllThemes(AllThemesRequest) returns (AllThemesResponse); + + // ### Create a theme + // + // Creates a new theme object, returning the theme details, including the created id. + // + // If `settings` are not specified, the default theme settings will be copied into the new theme. + // + // The theme `name` can only contain alphanumeric characters or underscores. Theme names should not contain any confidential information, such as customer names. + // + // **Update** an existing theme with [Update Theme](#!/Theme/update_theme) + // + // **Permanently delete** an existing theme with [Delete Theme](#!/Theme/delete_theme) + // + // For more information, see [Creating and Applying Themes](https://looker.com/docs/r/admin/themes). + // + // **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. 
Please contact your Account Manager or help.looker.com to update your license for this feature. + // + // + rpc CreateTheme(CreateThemeRequest) returns (CreateThemeResponse); + + // ### Search all themes for matching criteria. + // + // Returns an **array of theme objects** that match the specified search criteria. + // + // | Search Parameters | Description + // | :-------------------: | :------ | + // | `begin_at` only | Find themes active at or after `begin_at` + // | `end_at` only | Find themes active at or before `end_at` + // | both set | Find themes with an active inclusive period between `begin_at` and `end_at` + // + // Note: Range matching requires boolean AND logic. + // When using `begin_at` and `end_at` together, do not use `filter_or`=TRUE + // + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + // Get a **single theme** by id with [Theme](#!/Theme/theme) + // + // **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + // + // + rpc SearchThemes(SearchThemesRequest) returns (SearchThemesResponse); + + // ### Get the default theme + // + // Returns the active theme object set as the default. + // + // The **default** theme name can be set in the UI on the Admin|Theme UI page + // + // The optional `ts` parameter can specify a different timestamp than "now." If specified, it returns the default theme at the time indicated. + // + rpc DefaultTheme(DefaultThemeRequest) returns (DefaultThemeResponse); + + // ### Set the global default theme by theme name + // + // Only Admin users can call this function. + // + // Only an active theme with no expiration (`end_at` not set) can be assigned as the default theme. As long as a theme has an active record with no expiration, it can be set as the default. + // + // [Create Theme](#!/Theme/create) has detailed information on rules for default and active themes + // + // Returns the new specified default theme object. + // + // **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. 
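+ // As a point of reference, the request message for this RPC is emitted into the
+ // generated models file, not into this hunk. Based on the description above it
+ // presumably carries little more than the theme name, along the lines of this
+ // illustrative (not generated) sketch:
+ //
+ //   message SetDefaultThemeRequest {
+ //     string name = 1;  // name of the active, non-expiring theme to promote to default
+ //   }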
+ //
+ //
+ rpc SetDefaultTheme(SetDefaultThemeRequest) returns (SetDefaultThemeResponse);
+
+ // ### Get active themes
+ //
+ // Returns an array of active themes.
+ //
+ // If the `name` parameter is specified, it will return an array with one theme if it's active and found.
+ //
+ // The optional `ts` parameter can specify a different timestamp than "now."
+ //
+ // **Note**: Custom themes need to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature.
+ //
+ //
+ //
+ rpc ActiveThemes(ActiveThemesRequest) returns (ActiveThemesResponse);
+
+ // ### Get the named theme if it's active. Otherwise, return the default theme
+ //
+ // The optional `ts` parameter can specify a different timestamp than "now."
+ // Note: API users with `show` ability can call this function
+ //
+ // **Note**: Custom themes need to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature.
+ //
+ //
+ rpc ThemeOrDefault(ThemeOrDefaultRequest) returns (ThemeOrDefaultResponse);
+
+ // ### Validate a theme with the specified information
+ //
+ // Validates all values set for the theme, returning any errors encountered, or 200 OK if valid
+ //
+ // See [Create Theme](#!/Theme/create_theme) for constraints
+ //
+ // **Note**: Custom themes need to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature.
+ //
+ //
+ rpc ValidateTheme(ValidateThemeRequest) returns (ValidateThemeResponse);
+
+ // ### Get a theme by ID
+ //
+ // Use this to retrieve a specific theme, whether or not it's currently active.
+ //
+ // **Note**: Custom themes need to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature.
+ //
+ //
+ rpc Theme(ThemeRequest) returns (ThemeResponse);
+
+ // ### Update the theme by id.
+ //
+ // **Note**: Custom themes need to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature.
+ //
+ //
+ rpc UpdateTheme(UpdateThemeRequest) returns (UpdateThemeResponse);
+
+ // ### Delete a specific theme by id
+ //
+ // This operation permanently deletes the identified theme from the database.
+ //
+ // Because multiple themes can have the same name (with different activation time spans), themes can only be deleted by ID.
+ //
+ // All IDs associated with a theme name can be retrieved by searching for the theme name with [Theme Search](#!/Theme/search).
+ //
+ // **Note**: Custom themes need to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature.
+ //
+ //
+ rpc DeleteTheme(DeleteThemeRequest) returns (DeleteThemeResponse);
+
+
+
+ // User: Manage Users
+
+ // ### Get information about the current user; i.e. the user account currently calling the API.
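+ //
+ // The user RPCs below mirror the corresponding Looker REST endpoints, with their
+ // request/response messages generated into models.proto (outside this hunk). As a
+ // rough sketch of the shape involved, MeRequest presumably carries only the optional
+ // `fields` selector; the field name and tag number below are illustrative assumptions:
+ //
+ //   message MeRequest {
+ //     string fields = 1;  // comma-separated list of fields to include in the response
+ //   }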
+ //
+ rpc Me(MeRequest) returns (MeResponse);
+
+ // ### Get information about all users.
+ //
+ rpc AllUsers(AllUsersRequest) returns (AllUsersResponse);
+
+ // ### Create a user with the specified information.
+ //
+ rpc CreateUser(CreateUserRequest) returns (CreateUserResponse);
+
+ // ### Search users
+ //
+ // Returns all* user records that match the given search criteria.
+ //
+ // If multiple search params are given and `filter_or` is FALSE or not specified,
+ // search params are combined in a logical AND operation.
+ // Only rows that match *all* search param criteria will be returned.
+ //
+ // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation.
+ // Results will include rows that match **any** of the search criteria.
+ //
+ // String search params use case-insensitive matching.
+ // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions.
+ // example="dan%" will match "danger" and "Danzig" but not "David"
+ // example="D_m%" will match "Damage" and "dump"
+ //
+ // Integer search params can accept a single value or a comma separated list of values. The multiple
+ // values will be combined under a logical OR operation - results will match at least one of
+ // the given values.
+ //
+ // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match
+ // or exclude (respectively) rows where the column is null.
+ //
+ // Boolean search params accept only "true" and "false" as values.
+ //
+ //
+ // (*) Results are always filtered to the level of information the caller is permitted to view.
+ // Looker admins can see all user details; normal users in an open system can see
+ // names of other users but no details; normal users in a closed system can only see
+ // names of other users who are members of the same group as the user.
+ //
+ //
+ rpc SearchUsers(SearchUsersRequest) returns (SearchUsersResponse);
+
+ // ### Search for user accounts by name
+ //
+ // Returns all user accounts where `first_name` OR `last_name` OR `email` field values match a pattern.
+ // The pattern can contain `%` and `_` wildcards as in SQL LIKE expressions.
+ //
+ // Any additional search params will be combined into a logical AND expression.
+ //
+ rpc SearchUsersNames(SearchUsersNamesRequest) returns (SearchUsersNamesResponse);
+
+ // ### Get information about the user with a specific id.
+ //
+ // If the caller is an admin or the caller is the user being specified, then full user information will
+ // be returned. Otherwise, a minimal 'public' variant of the user information will be returned. This contains
+ // the user name and avatar URL, but no sensitive information.
+ //
+ rpc User(UserRequest) returns (UserResponse);
+
+ // ### Update information about the user with a specific id.
+ //
+ rpc UpdateUser(UpdateUserRequest) returns (UpdateUserResponse);
+
+ // ### Delete the user with a specific id.
+ //
+ // **DANGER** this will delete the user and all looks and other information owned by the user.
+ //
+ rpc DeleteUser(DeleteUserRequest) returns (DeleteUserResponse);
+
+ // ### Get information about the user with a credential of given type with specific id.
+ //
+ // This is used to do things like find users by their embed external_user_id. Or, find the user with
+ // a given api3 client_id, etc. The 'credential_type' matches the 'type' name of the various credential
+ // types. It must be one of the values listed in the table below.
The 'credential_id' is your unique Id + // for the user and is specific to each type of credential. + // + // An example using the Ruby sdk might look like: + // + // `sdk.user_for_credential('embed', 'customer-4959425')` + // + // This table shows the supported 'Credential Type' strings. The right column is for reference; it shows + // which field in the given credential type is actually searched when finding a user with the supplied + // 'credential_id'. + // + // | Credential Types | Id Field Matched | + // | ---------------- | ---------------- | + // | email | email | + // | google | google_user_id | + // | saml | saml_user_id | + // | oidc | oidc_user_id | + // | ldap | ldap_id | + // | api | token | + // | api3 | client_id | + // | embed | external_user_id | + // | looker_openid | email | + // + // NOTE: The 'api' credential type was only used with the legacy Looker query API and is no longer supported. The credential type for API you are currently looking at is 'api3'. + // + // + rpc UserForCredential(UserForCredentialRequest) returns (UserForCredentialResponse); + + // ### Email/password login information for the specified user. + rpc UserCredentialsEmail(UserCredentialsEmailRequest) returns (UserCredentialsEmailResponse); + + // ### Email/password login information for the specified user. + rpc CreateUserCredentialsEmail(CreateUserCredentialsEmailRequest) returns (CreateUserCredentialsEmailResponse); + + // ### Email/password login information for the specified user. + rpc UpdateUserCredentialsEmail(UpdateUserCredentialsEmailRequest) returns (UpdateUserCredentialsEmailResponse); + + // ### Email/password login information for the specified user. + rpc DeleteUserCredentialsEmail(DeleteUserCredentialsEmailRequest) returns (DeleteUserCredentialsEmailResponse); + + // ### Two-factor login information for the specified user. + rpc UserCredentialsTotp(UserCredentialsTotpRequest) returns (UserCredentialsTotpResponse); + + // ### Two-factor login information for the specified user. + rpc CreateUserCredentialsTotp(CreateUserCredentialsTotpRequest) returns (CreateUserCredentialsTotpResponse); + + // ### Two-factor login information for the specified user. + rpc DeleteUserCredentialsTotp(DeleteUserCredentialsTotpRequest) returns (DeleteUserCredentialsTotpResponse); + + // ### LDAP login information for the specified user. + rpc UserCredentialsLdap(UserCredentialsLdapRequest) returns (UserCredentialsLdapResponse); + + // ### LDAP login information for the specified user. + rpc DeleteUserCredentialsLdap(DeleteUserCredentialsLdapRequest) returns (DeleteUserCredentialsLdapResponse); + + // ### Google authentication login information for the specified user. + rpc UserCredentialsGoogle(UserCredentialsGoogleRequest) returns (UserCredentialsGoogleResponse); + + // ### Google authentication login information for the specified user. + rpc DeleteUserCredentialsGoogle(DeleteUserCredentialsGoogleRequest) returns (DeleteUserCredentialsGoogleResponse); + + // ### Saml authentication login information for the specified user. + rpc UserCredentialsSaml(UserCredentialsSamlRequest) returns (UserCredentialsSamlResponse); + + // ### Saml authentication login information for the specified user. + rpc DeleteUserCredentialsSaml(DeleteUserCredentialsSamlRequest) returns (DeleteUserCredentialsSamlResponse); + + // ### OpenID Connect (OIDC) authentication login information for the specified user. 
+ rpc UserCredentialsOidc(UserCredentialsOidcRequest) returns (UserCredentialsOidcResponse); + + // ### OpenID Connect (OIDC) authentication login information for the specified user. + rpc DeleteUserCredentialsOidc(DeleteUserCredentialsOidcRequest) returns (DeleteUserCredentialsOidcResponse); + + // ### API 3 login information for the specified user. This is for the newer API keys that can be added for any user. + rpc UserCredentialsApi3(UserCredentialsApi3Request) returns (UserCredentialsApi3Response); + + // ### API 3 login information for the specified user. This is for the newer API keys that can be added for any user. + rpc DeleteUserCredentialsApi3(DeleteUserCredentialsApi3Request) returns (DeleteUserCredentialsApi3Response); + + // ### API 3 login information for the specified user. This is for the newer API keys that can be added for any user. + rpc AllUserCredentialsApi3s(AllUserCredentialsApi3sRequest) returns (AllUserCredentialsApi3sResponse); + + // ### API 3 login information for the specified user. This is for the newer API keys that can be added for any user. + rpc CreateUserCredentialsApi3(CreateUserCredentialsApi3Request) returns (CreateUserCredentialsApi3Response); + + // ### Embed login information for the specified user. + rpc UserCredentialsEmbed(UserCredentialsEmbedRequest) returns (UserCredentialsEmbedResponse); + + // ### Embed login information for the specified user. + rpc DeleteUserCredentialsEmbed(DeleteUserCredentialsEmbedRequest) returns (DeleteUserCredentialsEmbedResponse); + + // ### Embed login information for the specified user. + rpc AllUserCredentialsEmbeds(AllUserCredentialsEmbedsRequest) returns (AllUserCredentialsEmbedsResponse); + + // ### Looker Openid login information for the specified user. Used by Looker Analysts. + rpc UserCredentialsLookerOpenid(UserCredentialsLookerOpenidRequest) returns (UserCredentialsLookerOpenidResponse); + + // ### Looker Openid login information for the specified user. Used by Looker Analysts. + rpc DeleteUserCredentialsLookerOpenid(DeleteUserCredentialsLookerOpenidRequest) returns (DeleteUserCredentialsLookerOpenidResponse); + + // ### Web login session for the specified user. + rpc UserSession(UserSessionRequest) returns (UserSessionResponse); + + // ### Web login session for the specified user. + rpc DeleteUserSession(DeleteUserSessionRequest) returns (DeleteUserSessionResponse); + + // ### Web login session for the specified user. + rpc AllUserSessions(AllUserSessionsRequest) returns (AllUserSessionsResponse); + + // ### Create a password reset token. + // This will create a cryptographically secure random password reset token for the user. + // If the user already has a password reset token then this invalidates the old token and creates a new one. + // The token is expressed as the 'password_reset_url' of the user's email/password credential object. + // This takes an optional 'expires' param to indicate if the new token should be an expiring token. + // Tokens that expire are typically used for self-service password resets for existing users. + // Invitation emails for new users typically are not set to expire. + // The expire period is always 60 minutes when expires is enabled. + // This method can be called with an empty body. 
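+ // Sketch of the request message this RPC takes, inferred from the parameters
+ // described above (the generated definition lives in models.proto; the field
+ // names and tag numbers here are assumptions for illustration only):
+ //
+ //   message CreateUserCredentialsEmailPasswordResetRequest {
+ //     int64 user_id = 1;  // user whose email/password credential receives the reset token
+ //     bool expires = 2;   // optional: issue an expiring (60 minute) token
+ //     string fields = 3;  // optional: comma-separated response field filter
+ //   }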
+ // + rpc CreateUserCredentialsEmailPasswordReset(CreateUserCredentialsEmailPasswordResetRequest) returns (CreateUserCredentialsEmailPasswordResetResponse); + + // ### Get information about roles of a given user + // + rpc UserRoles(UserRolesRequest) returns (UserRolesResponse); + + // ### Set roles of the user with a specific id. + // + rpc SetUserRoles(SetUserRolesRequest) returns (SetUserRolesResponse); + + // ### Get user attribute values for a given user. + // + // Returns the values of specified user attributes (or all user attributes) for a certain user. + // + // A value for each user attribute is searched for in the following locations, in this order: + // + // 1. in the user's account information + // 1. in groups that the user is a member of + // 1. the default value of the user attribute + // + // If more than one group has a value defined for a user attribute, the group with the lowest rank wins. + // + // The response will only include user attributes for which values were found. Use `include_unset=true` to include + // empty records for user attributes with no value. + // + // The value of all hidden user attributes will be blank. + // + rpc UserAttributeUserValues(UserAttributeUserValuesRequest) returns (UserAttributeUserValuesResponse); + + // ### Store a custom value for a user attribute in a user's account settings. + // + // Per-user user attribute values take precedence over group or default values. + // + rpc SetUserAttributeUserValue(SetUserAttributeUserValueRequest) returns (SetUserAttributeUserValueResponse); + + // ### Delete a user attribute value from a user's account settings. + // + // After the user attribute value is deleted from the user's account settings, subsequent requests + // for the user attribute value for this user will draw from the user's groups or the default + // value of the user attribute. See [Get User Attribute Values](#!/User/user_attribute_user_values) for more + // information about how user attribute values are resolved. + // + rpc DeleteUserAttributeUserValue(DeleteUserAttributeUserValueRequest) returns (DeleteUserAttributeUserValueResponse); + + // ### Send a password reset token. + // This will send a password reset email to the user. If a password reset token does not already exist + // for this user, it will create one and then send it. + // If the user has not yet set up their account, it will send a setup email to the user. + // The URL sent in the email is expressed as the 'password_reset_url' of the user's email/password credential object. + // Password reset URLs will expire in 60 minutes. + // This method can be called with an empty body. + // + rpc SendUserCredentialsEmailPasswordReset(SendUserCredentialsEmailPasswordResetRequest) returns (SendUserCredentialsEmailPasswordResetResponse); + + + + // UserAttribute: Manage User Attributes + + // ### Get information about all user attributes. + // + rpc AllUserAttributes(AllUserAttributesRequest) returns (AllUserAttributesResponse); + + // ### Create a new user attribute + // + // Permission information for a user attribute is conveyed through the `can` and `user_can_edit` fields. + // The `user_can_edit` field indicates whether an attribute is user-editable _anywhere_ in the application. + // The `can` field gives more granular access information, with the `set_value` child field indicating whether + // an attribute's value can be set by [Setting the User Attribute User Value](#!/User/set_user_attribute_user_value). 
+ // + // Note: `name` and `label` fields must be unique across all user attributes in the Looker instance. + // Attempting to create a new user attribute with a name or label that duplicates an existing + // user attribute will fail with a 422 error. + // + rpc CreateUserAttribute(CreateUserAttributeRequest) returns (CreateUserAttributeResponse); + + // ### Get information about a user attribute. + // + rpc UserAttribute(UserAttributeRequest) returns (UserAttributeResponse); + + // ### Update a user attribute definition. + // + rpc UpdateUserAttribute(UpdateUserAttributeRequest) returns (UpdateUserAttributeResponse); + + // ### Delete a user attribute (admin only). + // + rpc DeleteUserAttribute(DeleteUserAttributeRequest) returns (DeleteUserAttributeResponse); + + // ### Returns all values of a user attribute defined by user groups, in precedence order. + // + // A user may be a member of multiple groups which define different values for a given user attribute. + // The order of group-values in the response determines precedence for selecting which group-value applies + // to a given user. For more information, see [Set User Attribute Group Values](#!/UserAttribute/set_user_attribute_group_values). + // + // Results will only include groups that the caller's user account has permission to see. + // + rpc AllUserAttributeGroupValues(AllUserAttributeGroupValuesRequest) returns (AllUserAttributeGroupValuesResponse); + + // ### Define values for a user attribute across a set of groups, in priority order. + // + // This function defines all values for a user attribute defined by user groups. This is a global setting, potentially affecting + // all users in the system. This function replaces any existing group value definitions for the indicated user attribute. + // + // The value of a user attribute for a given user is determined by searching the following locations, in this order: + // + // 1. the user's account settings + // 2. the groups that the user is a member of + // 3. the default value of the user attribute, if any + // + // The user may be a member of multiple groups which define different values for that user attribute. The order of items in the group_values parameter + // determines which group takes priority for that user. Lowest array index wins. + // + // An alternate method to indicate the selection precedence of group-values is to assign numbers to the 'rank' property of each + // group-value object in the array. Lowest 'rank' value wins. If you use this technique, you must assign a + // rank value to every group-value object in the array. + // + // To set a user attribute value for a single user, see [Set User Attribute User Value](#!/User/set_user_attribute_user_value). + // To set a user attribute value for all members of a group, see [Set User Attribute Group Value](#!/Group/update_user_attribute_group_value). + // + rpc SetUserAttributeGroupValues(SetUserAttributeGroupValuesRequest) returns (SetUserAttributeGroupValuesResponse); + + + + // Workspace: Manage Workspaces + + // ### Get All Workspaces + // + // Returns all workspaces available to the calling user. + // + rpc AllWorkspaces(AllWorkspacesRequest) returns (AllWorkspacesResponse); + + // ### Get A Workspace + // + // Returns information about a workspace such as the git status and selected branches + // of all projects available to the caller's user account. 
+ // + // A workspace defines which versions of project files will be used to evaluate expressions + // and operations that use model definitions - operations such as running queries or rendering dashboards. + // Each project has its own git repository, and each project in a workspace may be configured to reference + // particular branch or revision within their respective repositories. + // + // There are two predefined workspaces available: "production" and "dev". + // + // The production workspace is shared across all Looker users. Models in the production workspace are read-only. + // Changing files in production is accomplished by modifying files in a git branch and using Pull Requests + // to merge the changes from the dev branch into the production branch, and then telling + // Looker to sync with production. + // + // The dev workspace is local to each Looker user. Changes made to project/model files in the dev workspace only affect + // that user, and only when the dev workspace is selected as the active workspace for the API session. + // (See set_session_workspace()). + // + // The dev workspace is NOT unique to an API session. Two applications accessing the Looker API using + // the same user account will see the same files in the dev workspace. To avoid collisions between + // API clients it's best to have each client login with API3 credentials for a different user account. + // + // Changes made to files in a dev workspace are persistent across API sessions. It's a good + // idea to commit any changes you've made to the git repository, but not strictly required. Your modified files + // reside in a special user-specific directory on the Looker server and will still be there when you login in again + // later and use update_session(workspace_id: "dev") to select the dev workspace for the new API session. + // + rpc Workspace(WorkspaceRequest) returns (WorkspaceResponse); + + +} \ No newline at end of file diff --git a/proto/grpc_proxy/src/main/proto/sdk/models.proto b/proto/grpc_proxy/src/main/proto/sdk/models.proto new file mode 100644 index 000000000..309cbe600 --- /dev/null +++ b/proto/grpc_proxy/src/main/proto/sdk/models.proto @@ -0,0 +1,10697 @@ +// MIT License +// +// Copyright (c) 2019 Looker Data Sciences, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. 
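+
+// Note on the numeric field tags used throughout this file: they appear to be
+// derived from a hash of the field name (the 31-based polynomial used by Java's
+// String.hashCode) rather than assigned sequentially, so a given field name maps
+// to the same tag in every message. For example, "id" hashes to
+// ('i' * 31) + 'd' = (105 * 31) + 100 = 3355, matching every `id = 3355` below,
+// and "can" hashes to 98256. Hashes that fall outside proto's valid tag range are
+// presumably adjusted by the generator.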
+ +// 203 API models: 203 Spec, 0 Request, 0 Write, 0 Enum + + +syntax = "proto3"; + +package looker; + +option java_package = "com.google.looker.grpc.services"; +option java_multiple_files = true; + +import "google/protobuf/any.proto"; +import "google/protobuf/timestamp.proto"; + + +message AccessToken { + // Access Token used for API calls + string access_token = 484733480; + // Type of Token + string token_type = 101507520; + // Number of seconds before the token expires + int64 expires_in = 416905464; + // Refresh token which can be used to obtain a new access token + string refresh_token = 358008858; +} + +// The appropriate horizontal text alignment the values of this field should be displayed in. Valid values are: "left", "right". +enum Align { + _ALIGN_UNSET = 0; + ALIGN_LEFT = 528533215; + ALIGN_RIGHT = 400500241; +} + + +message ApiSession { + // Operations the current user is able to perform on this object + map can = 98256; + // The id of active workspace for this session + string workspace_id = 394620993; + // The id of the actual user in the case when this session represents one user sudo'ing as another + int64 sudo_user_id = 287410032; +} + + +message ApiVersion { + // Current Looker release version number + string looker_release_version = 308761353; + ApiVersionElement current_version = 301857801; + // Array of versions supported by this Looker instance + repeated ApiVersionElement supported_versions = 507588669; +} + + +message ApiVersionElement { + // Version number as it appears in '/api/xxx/' urls + string version = 351608024; + // Full version number including minor version + string full_version = 380320202; + // Status of this version + string status = 446240775; + // Url for swagger.json for this version + string swagger_url = 427126350; +} + + +message BackupConfiguration { + // Operations the current user is able to perform on this object + map can = 98256; + // Type of backup: looker-s3 or custom-s3 + string type = 3575610; + // Name of bucket for custom-s3 backups + string custom_s3_bucket = 358719282; + // Name of region where the bucket is located + string custom_s3_bucket_region = 278083340; + // (Write-Only) AWS S3 key used for custom-s3 backups + string custom_s3_key = 277162771; + // (Write-Only) AWS S3 secret used for custom-s3 backups + string custom_s3_secret = 245512607; + // Link to get this item + string url = 116079; +} + + +message Board { + // Operations the current user is able to perform on this object + map can = 98256; + // Id of associated content_metadata record + int64 content_metadata_id = 293222822; + // Date of board creation + google.protobuf.Timestamp created_at = 342420026; + // Date of board deletion + google.protobuf.Timestamp deleted_at = 441264006; + // Description of the board + string description = 431136513; + // Sections of the board + repeated BoardSection board_sections = 499514956; + // Unique Id + int64 id = 3355; + // ids of the board sections in the order they should be displayed + repeated int64 section_order = 328304035; + // Title of the board + string title = 110371416; + // Date of last board update + google.protobuf.Timestamp updated_at = 295464393; + // User id of board creator + int64 user_id = 147132913; + // Whether the board is the primary homepage or not + bool primary_homepage = 522659170; +} + + +message BoardItem { + // Operations the current user is able to perform on this object + map can = 98256; + // Name of user who created the content this item is based on + string content_created_by = 310271269; + // 
Content favorite id associated with the item this content is based on + int64 content_favorite_id = 403544586; + // Content metadata id associated with the item this content is based on + int64 content_metadata_id = 293222822; + // Last time the content that this item is based on was updated + string content_updated_at = 424059459; + // Dashboard to base this item on + int64 dashboard_id = 496187565; + // The actual description for display + string description = 431136513; + // Number of times content has been favorited, if present + int64 favorite_count = 358762965; + // Associated Board Section + int64 board_section_id = 501228011; + // Unique Id + int64 id = 3355; + // The container folder name of the content + string location = 475260909; + // Look to base this item on + int64 look_id = 349778619; + // LookML Dashboard to base this item on + string lookml_dashboard_id = 339184839; + // An arbitrary integer representing the sort order within the section + int64 order = 106006350; + // The actual title for display + string title = 110371416; + // Relative url for the associated content + string url = 116079; + // Number of times content has been viewed, if present + int64 view_count = 383588418; +} + + +message BoardSection { + // Operations the current user is able to perform on this object + map can = 98256; + // Time at which this section was created. + google.protobuf.Timestamp created_at = 342420026; + // Time at which this section was deleted. + google.protobuf.Timestamp deleted_at = 441264006; + // Description of the content found in this section. + string description = 431136513; + // Id reference to parent board + int64 board_id = 402691141; + // Items in the board section + repeated BoardItem board_items = 345473718; + // Unique Id + int64 id = 3355; + // ids of the board items in the order they should be displayed + repeated int64 item_order = 488938807; + // Name of row + string title = 110371416; + // Time at which this section was last updated. + google.protobuf.Timestamp updated_at = 295464393; +} + +// Field category Valid values are: "parameter", "filter", "measure", "dimension". 
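+// As with Align above, the enum leads with a `_CATEGORY_UNSET = 0` sentinel:
+// proto3 requires the first enum value to be zero, and reserving it for "unset"
+// keeps a missing field from silently decoding to a legitimate API value.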
+enum Category { + _CATEGORY_UNSET = 0; + CATEGORY_PARAMETER = 421572962; + CATEGORY_FILTER = 428014217; + CATEGORY_MEASURE = 258713341; + CATEGORY_DIMENSION = 340795438; +} + + +message ColorCollection { + // Unique Id + string id = 3355; + // Label of color collection + string label = 102727412; + // Array of categorical palette definitions + repeated DiscretePalette categoricalPalettes = 181997160; + // Array of discrete palette definitions + repeated ContinuousPalette sequentialPalettes = 296477661; + // Array of diverging palette definitions + repeated ContinuousPalette divergingPalettes = 417697719; +} + + +message ColorStop { + // CSS color string + string color = 94842723; + // Offset in continuous palette (0 to 100) + int64 offset = 509889974; +} + + +message ColumnSearch { + // Name of schema containing the table + string schema_name = 505336523; + // Name of table containing the column + string table_name = 376157673; + // Name of column + string column_name = 417463574; + // Column data type + string data_type = 363359569; +} + + +message Command { + // Id of the command record + int64 id = 3355; + // Id of the command author + int64 author_id = 368900115; + // Name of the command + string name = 3373707; + // Description of the command + string description = 431136513; + // Id of the content associated with the command + string linked_content_id = 63843193; + // Name of the command Valid values are: "dashboard", "lookml_dashboard". + LinkedContentType linked_content_type = 305853718; +} + + +message ConnectionFeatures { + // Name of the dialect for this connection + string dialect_name = 453584193; + // True for cost estimating support + bool cost_estimate = 304953513; + // True for multiple database support + bool multiple_databases = 52008215; + // True for cost estimating support + bool column_search = 470196568; + // True for secondary index support + bool persistent_table_indexes = 409191033; + // True for persistent derived table support + bool persistent_derived_tables = 355959624; + // True for turtles support + bool turtles = 482650276; + // True for percentile support + bool percentile = 460912481; + // True for distinct percentile support + bool distinct_percentile = 338510432; + // True for stable views support + bool stable_views = 431662053; + // True for millisecond support + bool milliseconds = 85195282; + // True for microsecond support + bool microseconds = 366488014; + // True for subtotal support + bool subtotals = 277302775; + // True for geographic location support + bool location = 475260909; + // True for timezone conversion in query support + bool timezone = 519056897; + // True for connection pooling support + bool connection_pooling = 516929414; +} + + +message ContentFavorite { + // Unique Id + int64 id = 3355; + // User Id which owns this ContentFavorite + int64 user_id = 147132913; + // Content Metadata Id associated with this ContentFavorite + int64 content_metadata_id = 293222822; + // Id of a look + int64 look_id = 349778619; + // Id of a dashboard + int64 dashboard_id = 496187565; + LookBasic look = 3327647; + DashboardBase dashboard = 523930294; + // Id of a board + int64 board_id = 402691141; +} + + +message ContentMeta { + // Operations the current user is able to perform on this object + map can = 98256; + // Unique Id + int64 id = 3355; + // Name or title of underlying content + string name = 3373707; + // Id of Parent Content + int64 parent_id = 517581876; + // Id of associated dashboard when content_type is "dashboard" + string 
dashboard_id = 496187565; + // Id of associated look when content_type is "look" + int64 look_id = 349778619; + // Id of associated folder when content_type is "space" + string folder_id = 527488652; + // Content Type ("dashboard", "look", or "folder") + string content_type = 415923104; + // Whether content inherits its access levels from parent + bool inherits = 226856664; + // Id of Inherited Content + int64 inheriting_id = 350353364; + // Content Slug + string slug = 3533483; +} + + +message ContentMetaGroupUser { + // Operations the current user is able to perform on this object + map can = 98256; + // Unique Id + string id = 3355; + // Id of associated Content Metadata + string content_metadata_id = 293222822; + // Type of permission: "view" or "edit" Valid values are: "view", "edit". + PermissionType permission_type = 405376277; + // ID of associated group + int64 group_id = 506361563; + // ID of associated user + int64 user_id = 147132913; +} + + +message ContentValidation { + // A list of content errors + repeated ContentValidatorError content_with_errors = 415894303; + // Duration of content validation in seconds + float computation_time = 279797358; + // The number of looks validated + int64 total_looks_validated = 332982930; + // The number of dashboard elements validated + int64 total_dashboard_elements_validated = 231059308; + // The number of dashboard filters validated + int64 total_dashboard_filters_validated = 437898084; + // The number of scheduled plans validated + int64 total_scheduled_plans_validated = 504055445; + // The number of alerts validated + int64 total_alerts_validated = 392535167; + // The number of explores used across all content validated + int64 total_explores_validated = 481205653; +} + + +message ContentValidationAlert { + // ID of the alert + int64 id = 3355; + // ID of the LookML dashboard associated with the alert + string lookml_dashboard_id = 339184839; + // ID of the LookML dashboard element associated with the alert + string lookml_link_id = 458227247; + // An optional, user-defined title for the alert + string custom_title = 365526021; +} + + +message ContentValidationDashboard { + // Description + string description = 431136513; + // Unique Id + string id = 3355; + ContentValidationFolder folder = 317241572; + // Dashboard Title + string title = 110371416; +} + + +message ContentValidationDashboardElement { + // Text tile body text + string body_text = 313253482; + // Id of Dashboard + string dashboard_id = 496187565; + // Unique Id + string id = 3355; + // Id Of Look + string look_id = 349778619; + // Note Display + string note_display = 33923723; + // Note State + string note_state = 312488702; + // Note Text + string note_text = 445242710; + // Note Text as Html + string note_text_as_html = 459711923; + // Id Of Query + int64 query_id = 291663619; + // Text tile subtitle text + string subtitle_text = 527614074; + // Title of dashboard element + string title = 110371416; + // Whether title is hidden + bool title_hidden = 278123192; + // Text tile title + string title_text = 443341651; + // Type + string type = 3575610; +} + + +message ContentValidationDashboardFilter { + // Unique Id + string id = 3355; + // Id of Dashboard + string dashboard_id = 496187565; + // Name of filter + string name = 3373707; + // Title of filter + string title = 110371416; + // Type of filter: one of date, number, string, or field + string type = 3575610; + // Default value of filter + string default_value = 329667964; + // Model of filter (required if type = 
field) + string model = 104069929; + // Explore of filter (required if type = field) + string explore = 327287131; + // Dimension of filter (required if type = field) + string dimension = 273753254; +} + + +message ContentValidationError { + // Error message + string message = 477462531; + // Name of the field involved in the error + string field_name = 288329560; + // Name of the model involved in the error + string model_name = 526217848; + // Name of the explore involved in the error + string explore_name = 485574866; + // Whether this validation error is removable + bool removable = 320564534; +} + + +message ContentValidationFolder { + // Unique Name + string name = 3373707; + // Unique Id + string id = 3355; +} + + +message ContentValidationLook { + // Unique Id + int64 id = 3355; + // Look Title + string title = 110371416; + ContentValidationFolder folder = 317241572; +} + + +message ContentValidationLookMLDashboard { + // ID of the LookML Dashboard + string id = 3355; + // Title of the LookML Dashboard + string title = 110371416; + // ID of Space + string space_id = 511862461; +} + + +message ContentValidationLookMLDashboardElement { + // Link ID of the LookML Dashboard Element + string lookml_link_id = 458227247; + // Title of the LookML Dashboard Element + string title = 110371416; +} + + +message ContentValidationScheduledPlan { + // Name of this scheduled plan + string name = 3373707; + // Id of a look + int64 look_id = 349778619; + // Unique Id + int64 id = 3355; +} + + +message ContentValidatorError { + ContentValidationLook look = 3327647; + ContentValidationDashboard dashboard = 523930294; + ContentValidationDashboardElement dashboard_element = 100916305; + ContentValidationDashboardFilter dashboard_filter = 512220191; + ContentValidationScheduledPlan scheduled_plan = 298382269; + ContentValidationAlert alert = 92899676; + ContentValidationLookMLDashboard lookml_dashboard = 532857865; + ContentValidationLookMLDashboardElement lookml_dashboard_element = 469836468; + // A list of errors found for this piece of content + repeated ContentValidationError errors = 323658789; + // An id unique to this piece of content for this validation run + string id = 3355; +} + + +message ContentView { + // Operations the current user is able to perform on this object + map can = 98256; + // Unique Id + int64 id = 3355; + // Id of viewed Look + int64 look_id = 349778619; + // Id of the viewed Dashboard + int64 dashboard_id = 496187565; + // Content metadata id of the Look or Dashboard + int64 content_metadata_id = 293222822; + // Id of user content was viewed by + int64 user_id = 147132913; + // Id of group content was viewed by + int64 group_id = 506361563; + // Number of times piece of content was viewed + int64 view_count = 383588418; + // Number of times piece of content was favorited + int64 favorite_count = 358762965; + // Date the piece of content was last viewed + string last_viewed_at = 273134349; + // Week start date for the view and favorite count during that given week + string start_of_week_date = 75308398; +} + + +message ContinuousPalette { + // Unique identity string + string id = 3355; + // Label for palette + string label = 102727412; + // Type of palette + string type = 3575610; + // Array of ColorStops in the palette + repeated ColorStop stops = 109770929; +} + + +message CostEstimate { + // Cost of SQL statement + int64 cost = 3059661; + // Does the result come from the cache? 
+ bool cache_hit = 276631381; + // Cost measurement size + string cost_unit = 269894986; + // Human-friendly message + string message = 477462531; +} + + +message CreateCostEstimate { + // SQL statement to estimate + string sql = 114126; +} + + +message CreateDashboardFilter { + // Unique Id + string id = 3355; + // Id of Dashboard + string dashboard_id = 496187565; + // Name of filter + string name = 3373707; + // Title of filter + string title = 110371416; + // Type of filter: one of date, number, string, or field + string type = 3575610; + // Default value of filter + string default_value = 329667964; + // Model of filter (required if type = field) + string model = 104069929; + // Explore of filter (required if type = field) + string explore = 327287131; + // Dimension of filter (required if type = field) + string dimension = 273753254; + // Field information + map field = 97427706; + // Display order of this filter relative to other filters + int64 row = 113114; + // Array of listeners for faceted filters + repeated string listens_to_filters = 326256077; + // Whether the filter allows multiple filter values + bool allow_multiple_values = 444540061; + // Whether the filter requires a value to run the dashboard + bool required = 393139297; + // The visual configuration for this filter. Used to set up how the UI for this filter should appear. + map ui_config = 338401068; +} + + +message CreateDashboardRenderTask { + // Filter values to apply to the dashboard queries, in URL query format + string dashboard_filters = 454602920; + // Dashboard layout style: single_column or tiled + string dashboard_style = 394441966; +} + + +message CreateFolder { + // Unique Name + string name = 3373707; + // Id of Parent. If the parent id is null, this is a root-level entry + string parent_id = 517581876; +} + + +message CreateQueryTask { + // Operations the current user is able to perform on this object + map can = 98256; + // Id of query to run + int64 query_id = 291663619; + // Desired async query result format. Valid values are: "inline_json", "json", "json_detail", "json_fe", "csv", "html", "md", "txt", "xlsx", "gsxml". + ResultFormat result_format = 283324265; + // Source of query task + string source = 448252914; + // Create the task but defer execution + bool deferred = 323945455; + // Id of look associated with query. + int64 look_id = 349778619; + // Id of dashboard associated with query. + string dashboard_id = 496187565; +} + + +message CredentialsApi3 { + // Operations the current user is able to perform on this object + map can = 98256; + // Unique Id + int64 id = 3355; + // API key client_id + string client_id = 476022396; + // Timestamp for the creation of this credential + string created_at = 342420026; + // Has this credential been disabled? + bool is_disabled = 464089615; + // Short name for the type of this kind of credential + string type = 3575610; + // Link to get this item + string url = 116079; +} + + +message CredentialsEmail { + // Operations the current user is able to perform on this object + map can = 98256; + // Timestamp for the creation of this credential + string created_at = 342420026; + // EMail address used for user login + string email = 96619420; + // Force the user to change their password upon their next login + bool forced_password_reset_at_next_login = 457930717; + // Has this credential been disabled? 
+ bool is_disabled = 464089615; + // Timestamp for most recent login using credential + string logged_in_at = 219006096; + // Url with one-time use secret token that the user can use to reset password + string password_reset_url = 409079926; + // Short name for the type of this kind of credential + string type = 3575610; + // Link to get this item + string url = 116079; + // Link to get this user + string user_url = 266140933; +} + + +message CredentialsEmbed { + // Operations the current user is able to perform on this object + map can = 98256; + // Timestamp for the creation of this credential + string created_at = 342420026; + // Embedder's id for a group to which this user was added during the most recent login + string external_group_id = 515008972; + // Embedder's unique id for the user + string external_user_id = 438485145; + // Unique Id + int64 id = 3355; + // Has this credential been disabled? + bool is_disabled = 464089615; + // Timestamp for most recent login using credential + string logged_in_at = 219006096; + // Short name for the type of this kind of credential + string type = 3575610; + // Link to get this item + string url = 116079; +} + + +message CredentialsGoogle { + // Operations the current user is able to perform on this object + map can = 98256; + // Timestamp for the creation of this credential + string created_at = 342420026; + // Google domain + string domain = 331549391; + // EMail address + string email = 96619420; + // Google's Unique ID for this user + string google_user_id = 424203645; + // Has this credential been disabled? + bool is_disabled = 464089615; + // Timestamp for most recent login using credential + string logged_in_at = 219006096; + // Short name for the type of this kind of credential + string type = 3575610; + // Link to get this item + string url = 116079; +} + + +message CredentialsLDAP { + // Operations the current user is able to perform on this object + map can = 98256; + // Timestamp for the creation of this credential + string created_at = 342420026; + // EMail address + string email = 96619420; + // Has this credential been disabled? + bool is_disabled = 464089615; + // LDAP Distinguished name for this user (as-of the last login) + string ldap_dn = 22077474; + // LDAP Unique ID for this user + string ldap_id = 22077619; + // Timestamp for most recent login using credential + string logged_in_at = 219006096; + // Short name for the type of this kind of credential + string type = 3575610; + // Link to get this item + string url = 116079; +} + + +message CredentialsLookerOpenid { + // Operations the current user is able to perform on this object + map can = 98256; + // Timestamp for the creation of this credential + string created_at = 342420026; + // EMail address used for user login + string email = 96619420; + // Has this credential been disabled? + bool is_disabled = 464089615; + // Timestamp for most recent login using credential + string logged_in_at = 219006096; + // IP address of client for most recent login using credential + string logged_in_ip = 219006340; + // Short name for the type of this kind of credential + string type = 3575610; + // Link to get this item + string url = 116079; + // Link to get this user + string user_url = 266140933; +} + + +message CredentialsOIDC { + // Operations the current user is able to perform on this object + map can = 98256; + // Timestamp for the creation of this credential + string created_at = 342420026; + // EMail address + string email = 96619420; + // Has this credential been disabled? 
+ bool is_disabled = 464089615; + // Timestamp for most recent login using credential + string logged_in_at = 219006096; + // OIDC OP's Unique ID for this user + string oidc_user_id = 340693525; + // Short name for the type of this kind of credential + string type = 3575610; + // Link to get this item + string url = 116079; +} + + +message CredentialsSaml { + // Operations the current user is able to perform on this object + map can = 98256; + // Timestamp for the creation of this credential + string created_at = 342420026; + // EMail address + string email = 96619420; + // Has this credential been disabled? + bool is_disabled = 464089615; + // Timestamp for most recent login using credential + string logged_in_at = 219006096; + // Saml IdP's Unique ID for this user + string saml_user_id = 211576771; + // Short name for the type of this kind of credential + string type = 3575610; + // Link to get this item + string url = 116079; +} + + +message CredentialsTotp { + // Operations the current user is able to perform on this object + map can = 98256; + // Timestamp for the creation of this credential + string created_at = 342420026; + // Has this credential been disabled? + bool is_disabled = 464089615; + // Short name for the type of this kind of credential + string type = 3575610; + // User has verified + bool verified = 498595918; + // Link to get this item + string url = 116079; +} + + +message CustomWelcomeEmail { + // Operations the current user is able to perform on this object + map can = 98256; + // If true, custom email content will replace the default body of welcome emails + bool enabled = 402398511; + // The HTML to use as custom content for welcome emails. Script elements and other potentially dangerous markup will be removed. + string content = 475765308; + // The text to appear in the email subject line. + string subject = 466971317; + // The text to appear in the header line of the email body. + string header = 305317724; +} + + +message Dashboard { + // Operations the current user is able to perform on this object + map can = 98256; + // Content Favorite Id + int64 content_favorite_id = 403544586; + // Id of content metadata + int64 content_metadata_id = 293222822; + // Description + string description = 431136513; + // Is Hidden + bool hidden = 304371861; + // Unique Id + string id = 3355; + LookModel model = 104069929; + // Timezone in which the Dashboard will run by default. + string query_timezone = 202424720; + // Is Read-only + bool readonly = 433365215; + // Refresh Interval, as a time duration phrase like "2 hours 30 minutes". A number with no time units will be interpreted as whole seconds. + string refresh_interval = 454206058; + // Refresh Interval in milliseconds + int64 refresh_interval_to_i = 403845601; + FolderBase folder = 317241572; + // Dashboard Title + string title = 110371416; + // Id of User + int64 user_id = 147132913; + // Background color + string background_color = 509195076; + // Time that the Dashboard was created. + google.protobuf.Timestamp created_at = 342420026; + // Enables crossfiltering in dashboards - only available in dashboards-next (beta) + bool crossfilter_enabled = 315812131; + // Elements + repeated DashboardElement dashboard_elements = 291640431; + // Filters + repeated DashboardFilter dashboard_filters = 454602920; + // Layouts + repeated DashboardLayout dashboard_layouts = 430524919; + // Whether or not a dashboard is 'soft' deleted. + bool deleted = 387615750; + // Time that the Dashboard was 'soft' deleted. 
+ google.protobuf.Timestamp deleted_at = 441264006; + // Id of User that 'soft' deleted the dashboard. + int64 deleter_id = 441368332; + // Relative path of URI of LookML file to edit the dashboard (LookML dashboard only). + string edit_uri = 400593773; + // Number of times favorited + int64 favorite_count = 358762965; + // Time the dashboard was last accessed + google.protobuf.Timestamp last_accessed_at = 391609649; + // Time last viewed in the Looker web UI + google.protobuf.Timestamp last_viewed_at = 273134349; + // configuration option that governs how dashboard loading will happen. + string load_configuration = 278159272; + // Links this dashboard to a particular LookML dashboard such that calling a **sync** operation on that LookML dashboard will update this dashboard to match. + string lookml_link_id = 458227247; + // Show filters bar. **Security Note:** This property only affects the *cosmetic* appearance of the dashboard, not a user's ability to access data. Hiding the filters bar does **NOT** prevent users from changing filters by other means. For information on how to set up secure data access control policies, see [Control User Access to Data](https://looker.com/docs/r/api/control-access) + bool show_filters_bar = 313221091; + // Show title + bool show_title = 281598885; + // Content Metadata Slug + string slug = 3533483; + // Id of folder + string folder_id = 527488652; + // Color of text on text tiles + string text_tile_text_color = 296014652; + // Tile background color + string tile_background_color = 472118176; + // Tile text color + string tile_text_color = 482028031; + // Title color + string title_color = 422398302; + // Number of times viewed in the Looker web UI + int64 view_count = 383588418; + DashboardAppearance appearance = 449179417; + // The preferred route for viewing this dashboard (ie: dashboards or dashboards-next) + string preferred_viewer = 518361060; +} + + +message DashboardAggregateTableLookml { + // Dashboard Id + string dashboard_id = 496187565; + // Aggregate Table LookML + string aggregate_table_lookml = 152035185; +} + + +message DashboardAppearance { + // Page margin (side) width + int64 page_side_margins = 294185644; + // Background color for the dashboard + string page_background_color = 53912382; + // Title alignment on dashboard tiles + string tile_title_alignment = 278794058; + // Space between tiles + int64 tile_space_between = 92833282; + // Background color for tiles + string tile_background_color = 472118176; + // Tile shadow on/off + bool tile_shadow = 410871563; + // Key color + string key_color = 415126687; +} + + +message DashboardBase { + // Operations the current user is able to perform on this object + map can = 98256; + // Content Favorite Id + int64 content_favorite_id = 403544586; + // Id of content metadata + int64 content_metadata_id = 293222822; + // Description + string description = 431136513; + // Is Hidden + bool hidden = 304371861; + // Unique Id + string id = 3355; + LookModel model = 104069929; + // Timezone in which the Dashboard will run by default. + string query_timezone = 202424720; + // Is Read-only + bool readonly = 433365215; + // Refresh Interval, as a time duration phrase like "2 hours 30 minutes". A number with no time units will be interpreted as whole seconds. 
+ string refresh_interval = 454206058; + // Refresh Interval in milliseconds + int64 refresh_interval_to_i = 403845601; + FolderBase folder = 317241572; + // Dashboard Title + string title = 110371416; + // Id of User + int64 user_id = 147132913; +} + + +message DashboardElement { + // Operations the current user is able to perform on this object + map can = 98256; + // Text tile body text + string body_text = 313253482; + // Text tile body text as Html + string body_text_as_html = 381900416; + // Id of Dashboard + string dashboard_id = 496187565; + // Relative path of URI of LookML file to edit the dashboard element (LookML dashboard only). + string edit_uri = 400593773; + // Unique Id + string id = 3355; + LookWithQuery look = 3327647; + // Id Of Look + string look_id = 349778619; + // LookML link ID + string lookml_link_id = 458227247; + // ID of merge result + string merge_result_id = 374234034; + // Note Display + string note_display = 33923723; + // Note State + string note_state = 312488702; + // Note Text + string note_text = 445242710; + // Note Text as Html + string note_text_as_html = 459711923; + Query query = 107944136; + // Id Of Query + int64 query_id = 291663619; + // Refresh Interval + string refresh_interval = 454206058; + // Refresh Interval as integer + int64 refresh_interval_to_i = 403845601; + ResultMakerWithIdVisConfigAndDynamicFields result_maker = 523671970; + // ID of the ResultMakerLookup entry. + int64 result_maker_id = 347633462; + // Text tile subtitle text + string subtitle_text = 527614074; + // Title of dashboard element + string title = 110371416; + // Whether title is hidden + bool title_hidden = 278123192; + // Text tile title + string title_text = 443341651; + // Type + string type = 3575610; + // Count of Alerts associated to a dashboard element + int64 alert_count = 310385610; + // Text tile title text as Html + string title_text_as_html = 429319302; + // Text tile subtitle text as Html + string subtitle_text_as_html = 486874579; +} + + +message DashboardFilter { + // Operations the current user is able to perform on this object + map can = 98256; + // Unique Id + string id = 3355; + // Id of Dashboard + string dashboard_id = 496187565; + // Name of filter + string name = 3373707; + // Title of filter + string title = 110371416; + // Type of filter: one of date, number, string, or field + string type = 3575610; + // Default value of filter + string default_value = 329667964; + // Model of filter (required if type = field) + string model = 104069929; + // Explore of filter (required if type = field) + string explore = 327287131; + // Dimension of filter (required if type = field) + string dimension = 273753254; + // Field information + map field = 97427706; + // Display order of this filter relative to other filters + int64 row = 113114; + // Array of listeners for faceted filters + repeated string listens_to_filters = 326256077; + // Whether the filter allows multiple filter values + bool allow_multiple_values = 444540061; + // Whether the filter requires a value to run the dashboard + bool required = 393139297; + // The visual configuration for this filter. Used to set up how the UI for this filter should appear. 
+ map ui_config = 338401068; +} + + +message DashboardLayout { + // Operations the current user is able to perform on this object + map can = 98256; + // Unique Id + string id = 3355; + // Id of Dashboard + string dashboard_id = 496187565; + // Type + string type = 3575610; + // Is Active + bool active = 355737662; + // Column Width + int64 column_width = 104395875; + // Width + int64 width = 113126854; + // Whether or not the dashboard layout is deleted. + bool deleted = 387615750; + // Title extracted from the dashboard this layout represents. + string dashboard_title = 394294212; + // Components + repeated DashboardLayoutComponent dashboard_layout_components = 358251928; +} + + +message DashboardLayoutComponent { + // Operations the current user is able to perform on this object + map can = 98256; + // Unique Id + string id = 3355; + // Id of Dashboard Layout + string dashboard_layout_id = 343841433; + // Id Of Dashboard Element + string dashboard_element_id = 79370295; + // Row + int64 row = 113114; + // Column + int64 column = 338709290; + // Width + int64 width = 113126854; + // Height + int64 height = 305257398; + // Whether or not the dashboard layout component is deleted + bool deleted = 387615750; + // Dashboard element title, extracted from the Dashboard Element. + string element_title = 294405205; + // Whether or not the dashboard element title is displayed. + bool element_title_hidden = 387069323; + // Visualization type, extracted from a query's vis_config + string vis_type = 393864446; +} + + +message DashboardLookml { + // Id of Dashboard + string dashboard_id = 496187565; + // lookml of UDD + string lookml = 274273760; +} + + +message DataActionForm { + DataActionUserState state = 109757585; + // Array of form fields. + repeated DataActionFormField fields = 318677073; +} + + +message DataActionFormField { + // Name + string name = 3373707; + // Human-readable label + string label = 102727412; + // Description of field + string description = 431136513; + // Type of field. + string type = 3575610; + // Default value of the field. + string default = 386200976; + // The URL for an oauth link, if type is 'oauth_link'. + string oauth_url = 406587406; + // Whether or not a field supports interactive forms. + bool interactive = 461026232; + // Whether or not the field is required. This is a user-interface hint. A user interface displaying this form should not submit it without a value for this field. The action server must also perform this validation. + bool required = 393139297; + // If the form type is 'select', a list of options to be selected from. + repeated DataActionFormSelectOption options = 312368728; +} + + +message DataActionFormSelectOption { + // Name + string name = 3373707; + // Human-readable label + string label = 102727412; +} + + +message DataActionRequest { + // The JSON describing the data action. This JSON should be considered opaque and should be passed through unmodified from the query result it came from. + map action = 355737714; + // User input for any form values the data action might use. + map form_values = 257529603; +} + + +message DataActionResponse { + // ID of the webhook event that sent this data action. In some error conditions, this may be null. + string webhook_id = 311874531; + // Whether the data action was successful. + bool success = 466792447; + // When true, indicates that the client should refresh (rerun) the source query because the data may have been changed by the action. 
+ bool refresh_query = 358658151; + ValidationError validation_errors = 428190968; + // Optional message returned by the data action server describing the state of the action that took place. This can be used to implement custom failure messages. If a failure is related to a particular form field, the server should send back a validation error instead. The Looker web UI does not currently display any message if the action indicates 'success', but may do so in the future. + string message = 477462531; +} + + +message DataActionUserState { + // User state data + string data = 3076010; + // Time in seconds until the state needs to be refreshed + int64 refresh_time = 46200399; +} + + +message Datagroup { + // Operations the current user is able to perform on this object + map can = 98256; + // UNIX timestamp at which this entry was created. + int64 created_at = 342420026; + // Unique ID of the datagroup + int64 id = 3355; + // Name of the model containing the datagroup. Unique when combined with name. + string model_name = 526217848; + // Name of the datagroup. Unique when combined with model_name. + string name = 3373707; + // UNIX timestamp before which cache entries are considered stale. Cannot be in the future. + int64 stale_before = 349731513; + // UNIX timestamp at which this entry trigger was last checked. + int64 trigger_check_at = 361567047; + // The message returned with the error of the last trigger check. + string trigger_error = 312027087; + // The value of the trigger when last checked. + string trigger_value = 308230133; + // UNIX timestamp at which this entry became triggered. Cannot be in the future. + int64 triggered_at = 380872987; +} + + +message DBConnection { + // Operations the current user is able to perform on this object + map can = 98256; + // Name of the connection. Also used as the unique identifier + string name = 3373707; + Dialect dialect = 413753737; + // SQL Runner snippets for this connection + repeated Snippet snippets = 513815286; + // True if PDTs are enabled on this connection + bool pdts_enabled = 5079435; + // Host name/address of server + string host = 3208616; + // Port number on server + int64 port = 3446913; + // Username for server authentication + string username = 265713450; + // (Write-Only) Password for server authentication + string password = 304246438; + // Whether the connection uses OAuth for authentication. + bool uses_oauth = 369224423; + // (Write-Only) Base64 encoded Certificate body for server authentication (when appropriate for dialect). 
+ string certificate = 488099941; + // (Write-Only) Certificate keyfile type - .json or .p12 + string file_type = 329066488; + // Database name + string database = 447366238; + // Time zone of database + string db_timezone = 462146893; + // Timezone to use in queries + string query_timezone = 202424720; + // Scheme name + string schema = 453993775; + // Maximum number of concurrent connection to use + int64 max_connections = 426218758; + // Maximum size of query in GBs (BigQuery only, can be a user_attribute name) + string max_billing_gigabytes = 76432944; + // Use SSL/TLS when connecting to server + bool ssl = 114188; + // Verify the SSL + bool verify_ssl = 516339549; + // Name of temporary database (if used) + string tmp_db_name = 503272258; + // Additional params to add to JDBC connection string + string jdbc_additional_params = 377926166; + // Connection Pool Timeout, in seconds + int64 pool_timeout = 386536432; + // (Read/Write) SQL Dialect name + string dialect_name = 453584193; + // Creation date for this connection + string created_at = 342420026; + // Id of user who last modified this connection configuration + string user_id = 147132913; + // Is this an example connection? + bool example = 330742693; + // (Limited access feature) Are per user db credentials enabled. Enabling will remove previously set username and password + bool user_db_credentials = 401569003; + // Fields whose values map to user attribute names + repeated string user_attribute_fields = 501842924; + // Cron string specifying when maintenance such as PDT trigger checks and drops should be performed + string maintenance_cron = 408001806; + // Unix timestamp at start of last completed PDT trigger check process + string last_regen_at = 106929410; + // Unix timestamp at start of last completed PDT reap process + string last_reap_at = 413472721; + // Precache tables in the SQL Runner + bool sql_runner_precache_tables = 290662041; + // SQL statements (semicolon separated) to issue after connecting to the database. Requires `custom_after_connect_statements` license feature + string after_connect_statements = 39151260; + DBConnectionOverride pdt_context_override = 355387537; + // Is this connection created and managed by Looker + bool managed = 417630159; + // The Id of the ssh tunnel this connection uses + string tunnel_id = 313005097; + // Maximum number of threads to use to build PDTs in parallel + int64 pdt_concurrency = 395006102; + // When disable_context_comment is true comment will not be added to SQL + bool disable_context_comment = 390654690; + // An External OAuth Application to use for authenticating to the database + int64 oauth_application_id = 466979396; +} + + +message DBConnectionBase { + // Operations the current user is able to perform on this object + map can = 98256; + // Name of the connection. 
Also used as the unique identifier + string name = 3373707; + Dialect dialect = 413753737; + // SQL Runner snippets for this connection + repeated Snippet snippets = 513815286; + // True if PDTs are enabled on this connection + bool pdts_enabled = 5079435; +} + + +message DBConnectionOverride { + // Context in which to override (`pdt` is the only allowed value) + string context = 475765463; + // Host name/address of server + string host = 3208616; + // Port number on server + string port = 3446913; + // Username for server authentication + string username = 265713450; + // (Write-Only) Password for server authentication + string password = 304246438; + // Whether or not the password is overridden in this context + bool has_password = 445650520; + // (Write-Only) Base64 encoded Certificate body for server authentication (when appropriate for dialect). + string certificate = 488099941; + // (Write-Only) Certificate keyfile type - .json or .p12 + string file_type = 329066488; + // Database name + string database = 447366238; + // Scheme name + string schema = 453993775; + // Additional params to add to JDBC connection string + string jdbc_additional_params = 377926166; + // SQL statements (semicolon separated) to issue after connecting to the database. Requires `custom_after_connect_statements` license feature + string after_connect_statements = 39151260; +} + + +message DBConnectionTestResult { + // Operations the current user is able to perform on this object + map can = 98256; + // JDBC connection string. (only populated in the 'connect' test) + string connection_string = 271121883; + // Result message of test + string message = 477462531; + // Name of test + string name = 3373707; + // Result code of test + string status = 446240775; +} + + +message DelegateOauthTest { + // Delegate Oauth Connection Name + string name = 3373707; + // The ID of the installation target. For Slack, this would be workspace id. + string installation_target_id = 531552079; + // Installation ID + int64 installation_id = 409834416; + // Whether or not the test was successful + bool success = 466792447; +} + +// Status of the dependencies in your project. Valid values are: "lock_optional", "lock_required", "lock_error", "install_none". +enum DependencyStatus { + _DEPENDENCY_STATUS_UNSET = 0; + DEPENDENCY_STATUS_LOCK_OPTIONAL = 529686454; + DEPENDENCY_STATUS_LOCK_REQUIRED = 451155910; + DEPENDENCY_STATUS_LOCK_ERROR = 317341619; + DEPENDENCY_STATUS_INSTALL_NONE = 168704949; +} + + +message Dialect { + // The name of the dialect + string name = 3373707; + // The human-readable label of the connection + string label = 102727412; + // Whether the dialect supports query cost estimates + bool supports_cost_estimate = 499263120; + // PDT index columns + string persistent_table_indexes = 409191033; + // PDT sortkey columns + string persistent_table_sortkeys = 375046213; + // PDT distkey column + string persistent_table_distkey = 341338616; + // Suports streaming results + bool supports_streaming = 285904409; + // Should SQL Runner snippets automatically be run + bool automatically_run_sql_runner_snippets = 304805017; + // Array of names of the tests that can be run on a connection using this dialect + repeated string connection_tests = 303105120; + // Is supported with the inducer (i.e. 
generate from sql) + bool supports_inducer = 534911144; + // Can multiple databases be accessed from a connection using this dialect + bool supports_multiple_databases = 379600830; + // Whether the dialect supports allowing Looker to build persistent derived tables + bool supports_persistent_derived_tables = 473024582; + // Does the database have client SSL support settable through the JDBC string explicitly? + bool has_ssl_support = 405011613; +} + + +message DialectInfo { + // Operations the current user is able to perform on this object + map can = 98256; + // Default number max connections + string default_max_connections = 326961924; + // Default port number + string default_port = 325181984; + // Is the supporting driver installed + bool installed = 29046650; + // The human-readable label of the connection + string label = 102727412; + // What the dialect calls the equivalent of a normal SQL table + string label_for_database_equivalent = 317871990; + // The name of the dialect + string name = 3373707; + DialectInfoOptions supported_options = 345365763; +} + + +message DialectInfoOptions { + // Has additional params support + bool additional_params = 452971224; + // Has auth support + bool auth = 3005864; + // Has host support + bool host = 3208616; + // Has support for a service account + bool oauth_credentials = 347809451; + // Has project name support + bool project_name = 469694456; + // Has schema support + bool schema = 453993775; + // Has SSL support + bool ssl = 114188; + // Has timezone support + bool timezone = 519056897; + // Has tmp table support + bool tmp_table = 463763795; + // Username is required + bool username_required = 358259002; +} + + +message DigestEmails { + // Whether or not digest emails are enabled + bool is_enabled = 109284052; +} + + +message DigestEmailSend { + // True if content was successfully generated and delivered + bool configuration_delivered = 415144673; +} + + +message DiscretePalette { + // Unique identity string + string id = 3355; + // Label for palette + string label = 102727412; + // Type of palette + string type = 3575610; + // Array of colors in the palette + repeated string colors = 338710692; +} + + +message EmbedParams { + // The complete URL of the Looker UI page to display in the embed context. For example, to display the dashboard with id 34, `target_url` would look like: `https://mycompany.looker.com:9999/dashboards/34`. `target_uri` MUST contain a scheme (HTTPS), domain name, and URL path. Port must be included if it is required to reach the Looker server from browser clients. If the Looker instance is behind a load balancer or other proxy, `target_uri` must be the public-facing domain name and port required to reach the Looker instance, not the actual internal network machine name of the Looker instance. + string target_url = 486946241; + // Number of seconds the SSO embed session will be valid after the embed session is started. Defaults to 300 seconds. Maximum session length accepted is 2592000 seconds (30 days). + int64 session_length = 427421739; + // When true, the embed session will purge any residual Looker login state (such as in browser cookies) before creating a new login state with the given embed user info. Defaults to true. + bool force_logout_login = 429771950; +} + + +message EmbedSsoParams { + // The complete URL of the Looker UI page to display in the embed context. For example, to display the dashboard with id 34, `target_url` would look like: `https://mycompany.looker.com:9999/dashboards/34`. 
+ // `target_uri` MUST contain a scheme (HTTPS), domain name, and URL path. Port must be included if it is required to reach the Looker server from browser clients. If the Looker instance is behind a load balancer or other proxy, `target_uri` must be the public-facing domain name and port required to reach the Looker instance, not the actual internal network machine name of the Looker instance.
+ string target_url = 486946241;
+ // Number of seconds the SSO embed session will be valid after the embed session is started. Defaults to 300 seconds. Maximum session length accepted is 2592000 seconds (30 days).
+ int64 session_length = 427421739;
+ // When true, the embed session will purge any residual Looker login state (such as in browser cookies) before creating a new login state with the given embed user info. Defaults to true.
+ bool force_logout_login = 429771950;
+ // A value from an external system that uniquely identifies the embed user. Since the user_ids of Looker embed users may change with every embed session, external_user_id provides a way to assign a known, stable user identifier across multiple embed sessions.
+ string external_user_id = 438485145;
+ // First name of the embed user. Defaults to 'Embed' if not specified
+ string first_name = 160985414;
+ // Last name of the embed user. Defaults to 'User' if not specified
+ string last_name = 503280549;
+ // Sets the user timezone for the embed user session, if the User Specific Timezones setting is enabled in the Looker admin settings. A value of `null` forces the embed user to use the Looker Application Default Timezone. You MUST omit this property from the request if the User Specific Timezones setting is disabled. Timezone values are validated against the IANA Timezone standard and can be seen in the Application Time Zone dropdown list on the Looker General Settings admin page.
+ string user_timezone = 506419076;
+ // List of Looker permission names to grant to the embed user. Requested permissions will be filtered to permissions allowed for embed sessions.
+ repeated string permissions = 283426081;
+ // List of model names that the embed user may access
+ repeated string models = 534399691;
+ // List of Looker group ids in which to enroll the embed user
+ repeated int64 group_ids = 370665154;
+ // A unique value identifying an embed-exclusive group. Multiple embed users using the same `external_group_id` value will be able to share Looker content with each other. Content and embed users associated with the `external_group_id` will not be accessible to normal Looker users or embed users not associated with this `external_group_id`.
+ int64 external_group_id = 515008972;
+ // A dictionary of name-value pairs associating a Looker user attribute name with a value.
+ map user_attributes = 435197330;
+ // Id of the embed secret to use to sign this SSO url. If specified, the value must be an id of a valid (active) secret defined in the Looker instance. If not specified, the URL will be signed with the newest active embed secret defined in the Looker instance.
+ int64 secret_id = 369773947;
+}
+
+
+message EmbedUrlResponse {
+ // The embed URL. Any modification to this string will make the URL unusable.
+ string url = 116079; +} + + +message Error { + // Error details + string message = 477462531; + // Documentation link + string documentation_url = 282180250; +} + + +message ExternalOauthApplication { + // Operations the current user is able to perform on this object + map can = 98256; + // ID of this OAuth Application + int64 id = 3355; + // The name of this application. For Snowflake connections, this should be the name of the host database. + string name = 3373707; + // The OAuth Client ID for this application + string client_id = 476022396; + // (Write-Only) The OAuth Client Secret for this application + string client_secret = 278906578; + // The database dialect for this application. + string dialect_name = 453584193; + // Creation time for this application + google.protobuf.Timestamp created_at = 342420026; +} + +// The style of dimension fill that is possible for this field. Null if no dimension fill is possible. Valid values are: "enumeration", "range". +enum FillStyle { + _FILL_STYLE_UNSET = 0; + FILL_STYLE_ENUMERATION = 426018000; + FILL_STYLE_RANGE = 273453555; +} + + +message Folder { + // Unique Name + string name = 3373707; + // Id of Parent. If the parent id is null, this is a root-level entry + string parent_id = 517581876; + // Unique Id + string id = 3355; + // Id of content metadata + int64 content_metadata_id = 293222822; + // Time the space was created + google.protobuf.Timestamp created_at = 342420026; + // User Id of Creator + int64 creator_id = 344833155; + // Children Count + int64 child_count = 477810154; + // Embedder's Id if this folder was autogenerated as an embedding shared folder via 'external_group_id' in an SSO embed login + string external_id = 288268924; + // Folder is an embed folder + bool is_embed = 111589252; + // Folder is the root embed shared folder + bool is_embed_shared_root = 289654239; + // Folder is the root embed users folder + bool is_embed_users_root = 533781034; + // Folder is a user's personal folder + bool is_personal = 291866187; + // Folder is descendant of a user's personal folder + bool is_personal_descendant = 429873891; + // Folder is the root shared folder + bool is_shared_root = 508018572; + // Folder is the root user folder + bool is_users_root = 499284718; + // Operations the current user is able to perform on this object + map can = 98256; + // Dashboards + repeated DashboardBase dashboards = 469015063; + // Looks + repeated LookWithDashboards looks = 103157172; +} + + +message FolderBase { + // Unique Name + string name = 3373707; + // Id of Parent. 
If the parent id is null, this is a root-level entry + string parent_id = 517581876; + // Unique Id + string id = 3355; + // Id of content metadata + int64 content_metadata_id = 293222822; + // Time the folder was created + google.protobuf.Timestamp created_at = 342420026; + // User Id of Creator + int64 creator_id = 344833155; + // Children Count + int64 child_count = 477810154; + // Embedder's Id if this folder was autogenerated as an embedding shared folder via 'external_group_id' in an SSO embed login + string external_id = 288268924; + // Folder is an embed folder + bool is_embed = 111589252; + // Folder is the root embed shared folder + bool is_embed_shared_root = 289654239; + // Folder is the root embed users folder + bool is_embed_users_root = 533781034; + // Folder is a user's personal folder + bool is_personal = 291866187; + // Folder is descendant of a user's personal folder + bool is_personal_descendant = 429873891; + // Folder is the root shared folder + bool is_shared_root = 508018572; + // Folder is the root user folder + bool is_users_root = 499284718; + // Operations the current user is able to perform on this object + map can = 98256; +} + +// Specifies the data format of the region information. Valid values are: "topojson", "vector_tile_region". +enum Format { + _FORMAT_UNSET = 0; + FORMAT_TOPOJSON = 313154326; + FORMAT_VECTOR_TILE_REGION = 387763843; +} + +// Scheme that is running on application server (for PRs, file browsing, etc.) Valid values are: "http", "https". +enum GitApplicationServerHttpScheme { + _GIT_APPLICATION_SERVER_HTTP_SCHEME_UNSET = 0; + GIT_APPLICATION_SERVER_HTTP_SCHEME_HTTP = 392265021; + GIT_APPLICATION_SERVER_HTTP_SCHEME_HTTPS = 349055574; +} + + +message GitBranch { + // Operations the current user is able to perform on this object + map can = 98256; + // The short name on the local. Updating `name` results in `git checkout ` + string name = 3373707; + // The name of the remote + string remote = 467305437; + // The short name on the remote + string remote_name = 517186119; + // Name of error + string error = 96784904; + // Message describing an error if present + string message = 477462531; + // Name of the owner of a personal branch + string owner_name = 278559819; + // Whether or not this branch is readonly + bool readonly = 433365215; + // Whether or not this branch is a personal branch - readonly for all developers except the owner + bool personal = 443164224; + // Whether or not a local ref exists for the branch + bool is_local = 118114326; + // Whether or not a remote ref exists for the branch + bool is_remote = 470571781; + // Whether or not this is the production branch + bool is_production = 236569874; + // Number of commits the local branch is ahead of the remote + int64 ahead_count = 356439415; + // Number of commits the local branch is behind the remote + int64 behind_count = 324674445; + // UNIX timestamp at which this branch was last committed. + int64 commit_at = 372790769; + // The resolved ref of this branch. Updating `ref` results in `git reset --hard ``. + string ref = 112787; + // The resolved ref of this branch remote. 
+ string remote_ref = 520824461; +} + + +message GitConnectionTest { + // Operations the current user is able to perform on this object + map can = 98256; + // Human readable string describing the test + string description = 431136513; + // A short string, uniquely naming this test + string id = 3355; +} + + +message GitConnectionTestResult { + // Operations the current user is able to perform on this object + map can = 98256; + // A short string, uniquely naming this test + string id = 3355; + // Additional data from the test + string message = 477462531; + // Either 'pass' or 'fail' + string status = 446240775; +} + + +message GitStatus { + // Git action: add, delete, etc + string action = 355737714; + // When true, changes to the local file conflict with the remote repository + bool conflict = 290023959; + // When true, the file can be reverted to an earlier state + bool revertable = 390695265; + // Git description of the action + string text = 3556653; +} + + +message Group { + // Operations the current user is able to perform on this object + map can = 98256; + // Group can be used in content access controls + bool can_add_to_content_metadata = 97036652; + // Currently logged in user is group member + bool contains_current_user = 520764295; + // External Id group if embed group + string external_group_id = 515008972; + // Group membership controlled outside of Looker + bool externally_managed = 317189570; + // Unique Id + int64 id = 3355; + // New users are added to this group by default + bool include_by_default = 413560172; + // Name of group + string name = 3373707; + // Number of users included in this group + int64 user_count = 479975142; +} + + +message GroupHierarchy { + // Operations the current user is able to perform on this object + map can = 98256; + // Group can be used in content access controls + bool can_add_to_content_metadata = 97036652; + // Currently logged in user is group member + bool contains_current_user = 520764295; + // External Id group if embed group + string external_group_id = 515008972; + // Group membership controlled outside of Looker + bool externally_managed = 317189570; + // Unique Id + int64 id = 3355; + // New users are added to this group by default + bool include_by_default = 413560172; + // Name of group + string name = 3373707; + // Number of users included in this group + int64 user_count = 479975142; + // IDs of parents of this group + repeated int64 parent_group_ids = 533743336; + // Role IDs assigned to group + repeated int64 role_ids = 266265745; +} + + +message GroupIdForGroupInclusion { + // Id of group + int64 group_id = 506361563; +} + + +message GroupIdForGroupUserInclusion { + // Id of user + int64 user_id = 147132913; +} + + +message GroupSearch { + // Operations the current user is able to perform on this object + map can = 98256; + // Group can be used in content access controls + bool can_add_to_content_metadata = 97036652; + // Currently logged in user is group member + bool contains_current_user = 520764295; + // External Id group if embed group + string external_group_id = 515008972; + // Group membership controlled outside of Looker + bool externally_managed = 317189570; + // Unique Id + int64 id = 3355; + // New users are added to this group by default + bool include_by_default = 413560172; + // Name of group + string name = 3373707; + // Number of users included in this group + int64 user_count = 479975142; + // Roles assigned to group + repeated Role roles = 108695229; +} + + +message HomepageItem { + // Operations the 
current user is able to perform on this object + map can = 98256; + // Name of user who created the content this item is based on + string content_created_by = 310271269; + // Content favorite id associated with the item this content is based on + int64 content_favorite_id = 403544586; + // Content metadata id associated with the item this content is based on + int64 content_metadata_id = 293222822; + // Last time the content that this item is based on was updated + string content_updated_at = 424059459; + // Custom description entered by the user, if present + string custom_description = 276109801; + // (Write-Only) base64 encoded image data + string custom_image_data_base64 = 133974354; + // Custom image_url entered by the user, if present + string custom_image_url = 84288829; + // Custom title entered by the user, if present + string custom_title = 365526021; + // Custom url entered by the user, if present + string custom_url = 395339799; + // Dashboard to base this item on + int64 dashboard_id = 496187565; + // The actual description for display + string description = 431136513; + // Number of times content has been favorited, if present + int64 favorite_count = 358762965; + // Associated Homepage Section + int64 homepage_section_id = 398167145; + // Unique Id + int64 id = 3355; + // The actual image_url for display + string image_url = 438911930; + // The container folder name of the content + string location = 475260909; + // Look to base this item on + int64 look_id = 349778619; + // LookML Dashboard to base this item on + string lookml_dashboard_id = 339184839; + // An arbitrary integer representing the sort order within the section + int64 order = 106006350; + // Number of seconds it took to fetch the section this item is in + float section_fetch_time = 506652826; + // The actual title for display + string title = 110371416; + // The actual url for display + string url = 116079; + // Whether the custom description should be used instead of the content description, if the item is associated with content + bool use_custom_description = 302018950; + // Whether the custom image should be used instead of the content image, if the item is associated with content + bool use_custom_image = 531254114; + // Whether the custom title should be used instead of the content title, if the item is associated with content + bool use_custom_title = 536283105; + // Whether the custom url should be used instead of the content url, if the item is associated with content + bool use_custom_url = 314245462; + // Number of times content has been viewed, if present + int64 view_count = 383588418; +} + + +message HomepageSection { + // Operations the current user is able to perform on this object + map can = 98256; + // Time at which this section was created. + google.protobuf.Timestamp created_at = 342420026; + // Time at which this section was deleted. + google.protobuf.Timestamp deleted_at = 441264006; + // A URL pointing to a page showing further information about the content in the section. + string detail_url = 517980607; + // Id reference to parent homepage + int64 homepage_id = 360012995; + // Items in the homepage section + repeated HomepageItem homepage_items = 459801588; + // Unique Id + int64 id = 3355; + // Is this a header section (has no items) + bool is_header = 378615903; + // ids of the homepage items in the order they should be displayed + repeated int64 item_order = 488938807; + // Name of row + string title = 110371416; + // Time at which this section was last updated. 
+ google.protobuf.Timestamp updated_at = 295464393; + // Description of the content found in this section. + string description = 431136513; +} + + +message ImportedProject { + // Dependency name + string name = 3373707; + // Url for a remote dependency + string url = 116079; + // Ref for a remote dependency + string ref = 112787; + // Flag signifying if a dependency is remote or local + bool is_remote = 470571781; +} + + +message Integration { + // Operations the current user is able to perform on this object + map can = 98256; + // ID of the integration. + string id = 3355; + // ID of the integration hub. + int64 integration_hub_id = 485110416; + // Label for the integration. + string label = 102727412; + // Description of the integration. + string description = 431136513; + // Whether the integration is available to users. + bool enabled = 402398511; + // Array of params for the integration. + repeated IntegrationParam params = 497713981; + // A list of data formats the integration supports. If unspecified, the default is all data formats. Valid values are: "txt", "csv", "inline_json", "json", "json_label", "json_detail", "json_detail_lite_stream", "xlsx", "html", "wysiwyg_pdf", "assembled_pdf", "wysiwyg_png", "csv_zip". + repeated SupportedFormats supported_formats = 488373554; + // A list of action types the integration supports. Valid values are: "cell", "query", "dashboard". + repeated SupportedActionTypes supported_action_types = 526489160; + // A list of formatting options the integration supports. If unspecified, defaults to all formats. Valid values are: "formatted", "unformatted". + repeated SupportedFormattings supported_formattings = 516035485; + // A list of visualization formatting options the integration supports. If unspecified, defaults to all formats. Valid values are: "apply", "noapply". + repeated SupportedVisualizationFormattings supported_visualization_formattings = 377097887; + // A list of all the download mechanisms the integration supports. The order of values is not significant: Looker will select the most appropriate supported download mechanism for a given query. The integration must ensure it can handle any of the mechanisms it claims to support. If unspecified, this defaults to all download setting values. Valid values are: "push", "url". + repeated SupportedDownloadSettings supported_download_settings = 452598570; + // URL to an icon for the integration. + string icon_url = 368794027; + // Whether the integration uses oauth. + bool uses_oauth = 369224423; + // A list of descriptions of required fields that this integration is compatible with. If there are multiple entries in this list, the integration requires more than one field. If unspecified, no fields will be required. + repeated IntegrationRequiredField required_fields = 395601836; + // Whether the integration uses delegate oauth, which allows federation between an integration installation scope specific entity (like org, group, and team, etc.) and Looker. + bool delegate_oauth = 412079575; + // Whether the integration is available to users. + repeated int64 installed_delegate_oauth_targets = 448297026; +} + + +message IntegrationHub { + // Operations the current user is able to perform on this object + map can = 98256; + // ID of the hub. + int64 id = 3355; + // URL of the hub. + string url = 116079; + // Label of the hub. + string label = 102727412; + // Whether this hub is a first-party integration hub operated by Looker. 
+ bool official = 382644874; + // An error message, present if the integration hub metadata could not be fetched. If this is present, the integration hub is unusable. + string fetch_error_message = 324681221; + // (Write-Only) An authorization key that will be sent to the integration hub on every request. + string authorization_token = 369213356; + // Whether the authorization_token is set for the hub. + bool has_authorization_token = 312331662; + // Whether the legal agreement message has been signed by the user. This only matters if legal_agreement_required is true. + bool legal_agreement_signed = 93078455; + // Whether the legal terms for the integration hub are required before use. + bool legal_agreement_required = 362720929; + // The legal agreement text for this integration hub. + string legal_agreement_text = 529546684; +} + + +message IntegrationParam { + // Name of the parameter. + string name = 3373707; + // Label of the parameter. + string label = 102727412; + // Short description of the parameter. + string description = 431136513; + // Whether the parameter is required to be set to use the destination. If unspecified, this defaults to false. + bool required = 393139297; + // Whether the parameter has a value set. + bool has_value = 72657804; + // The current value of the parameter. Always null if the value is sensitive. When writing, null values will be ignored. Set the value to an empty string to clear it. + string value = 111972721; + // When present, the param's value comes from this user attribute instead of the 'value' parameter. Set to null to use the 'value'. + string user_attribute_name = 494434128; + // Whether the parameter contains sensitive data like API credentials. If unspecified, this defaults to true. + bool sensitive = 465391254; + // When true, this parameter must be assigned to a user attribute in the admin panel (instead of a constant value), and that value may be updated by the user as part of the integration flow. + bool per_user = 424877485; + // When present, the param represents the oauth url the user will be taken to. + string delegate_oauth_url = 100182643; +} + + +message IntegrationRequiredField { + // Matches a field that has this tag. + string tag = 114586; + // If present, supercedes 'tag' and matches a field that has any of the provided tags. + repeated string any_tag = 421279388; + // If present, supercedes 'tag' and matches a field that has all of the provided tags. + repeated string all_tags = 449565693; +} + + +message IntegrationTestResult { + // Whether or not the test was successful + bool success = 466792447; + // A message representing the results of the test. + string message = 477462531; + // An array of connection test result for delegate oauth actions. 
+ repeated DelegateOauthTest delegate_oauth_result = 363584319; +} + + +message InternalHelpResources { + // Operations the current user is able to perform on this object + map can = 98256; + // If true and internal help resources content is not blank then the link for internal help resources will be shown in the help menu and the content displayed within Looker + bool enabled = 402398511; +} + + +message InternalHelpResourcesContent { + // Operations the current user is able to perform on this object + map can = 98256; + // Text to display in the help menu item which will display the internal help resources + string organization_name = 425834740; + // Content to be displayed in the internal help resources page/modal + string markdown_content = 165250775; +} + + +message LDAPConfig { + // Operations the current user is able to perform on this object + map can = 98256; + // Allow alternate email-based login via '/login/email' for admins and for specified users with the 'login_special_email' permission. This option is useful as a fallback during ldap setup, if ldap config problems occur later, or if you need to support some users who are not in your ldap directory. Looker email/password logins are always disabled for regular users when ldap is enabled. + bool alternate_email_login_allowed = 332315627; + // (Write-Only) Password for the LDAP account used to access the LDAP server + string auth_password = 415503534; + // Users will not be allowed to login at all unless a role for them is found in LDAP if set to true + bool auth_requires_role = 488630792; + // Distinguished name of LDAP account used to access the LDAP server + string auth_username = 474550684; + // LDAP server hostname + string connection_host = 365749556; + // LDAP host port + string connection_port = 365868705; + // Use Transport Layer Security + bool connection_tls = 317633542; + // Do not verify peer when using TLS + bool connection_tls_no_verify = 432426857; + // (Write-Only) Array of ids of groups that will be applied to new users the first time they login via LDAP + repeated int64 default_new_user_group_ids = 91703103; + // (Read-only) Groups that will be applied to new users the first time they login via LDAP + repeated Group default_new_user_groups = 381653162; + // (Write-Only) Array of ids of roles that will be applied to new users the first time they login via LDAP + repeated int64 default_new_user_role_ids = 429566625; + // (Read-only) Roles that will be applied to new users the first time they login via LDAP + repeated Role default_new_user_roles = 435450942; + // Enable/Disable LDAP authentication for the server + bool enabled = 402398511; + // Don't attempt to do LDAP search result paging (RFC 2696) even if the LDAP server claims to support it. + bool force_no_page = 344726209; + // (Read-only) Array of mappings between LDAP Groups and Looker Roles + repeated LDAPGroupRead groups = 309365131; + // Base dn for finding groups in LDAP searches + string groups_base_dn = 456083027; + // Identifier for a strategy for how Looker will search for groups in the LDAP server + string groups_finder_type = 336365796; + // LDAP Group attribute that signifies the members of the groups. Most commonly 'member' + string groups_member_attribute = 281577616; + // Optional comma-separated list of supported LDAP objectclass for groups when doing groups searches + string groups_objectclasses = 270841289; + // LDAP Group attribute that signifies the user in a group. 
Most commonly 'dn' + string groups_user_attribute = 467193795; + // (Read/Write) Array of mappings between LDAP Groups and arrays of Looker Role ids + repeated LDAPGroupWrite groups_with_role_ids = 475300071; + // (Read-only) Has the password been set for the LDAP account used to access the LDAP server + bool has_auth_password = 349728411; + // Merge first-time ldap login to existing user account by email addresses. When a user logs in for the first time via ldap this option will connect this user into their existing account by finding the account with a matching email address. Otherwise a new user account will be created for the user. + bool merge_new_users_by_email = 391067523; + // When this config was last modified + string modified_at = 386047141; + // User id of user who last modified this config + string modified_by = 386047132; + // Set user roles in Looker based on groups from LDAP + bool set_roles_from_groups = 441288982; + // (Write-Only) Test LDAP user password. For ldap tests only. + string test_ldap_password = 71095482; + // (Write-Only) Test LDAP user login id. For ldap tests only. + string test_ldap_user = 159290890; + // Name of user record attributes used to indicate email address field + string user_attribute_map_email = 384506455; + // Name of user record attributes used to indicate first name + string user_attribute_map_first_name = 401269830; + // Name of user record attributes used to indicate last name + string user_attribute_map_last_name = 506361433; + // Name of user record attributes used to indicate unique record id + string user_attribute_map_ldap_id = 271507598; + // (Read-only) Array of mappings between LDAP User Attributes and Looker User Attributes + repeated LDAPUserAttributeRead user_attributes = 435197330; + // (Read/Write) Array of mappings between LDAP User Attributes and arrays of Looker User Attribute ids + repeated LDAPUserAttributeWrite user_attributes_with_ids = 514666905; + // Distinguished name of LDAP node used as the base for user searches + string user_bind_base_dn = 468855466; + // (Optional) Custom RFC-2254 filter clause for use in finding user during login. Combined via 'and' with the other generated filter clauses. + string user_custom_filter = 359623314; + // Name(s) of user record attributes used for matching user login id (comma separated list) + string user_id_attribute_names = 495858029; + // (Optional) Name of user record objectclass used for finding user during login id + string user_objectclass = 365251017; + // Allow LDAP auth'd users to be members of non-reflected Looker groups. If 'false', user will be removed from non-reflected groups on login. + bool allow_normal_group_membership = 480346046; + // LDAP auth'd users will be able to inherit roles from non-reflected Looker groups. + bool allow_roles_from_normal_groups = 302729383; + // Allows roles to be directly assigned to LDAP auth'd users. + bool allow_direct_roles = 315512839; + // Link to get this item + string url = 116079; +} + + +message LDAPConfigTestIssue { + // Severity of the issue. 
Error or Warning + string severity = 369575103; + // Message describing the issue + string message = 477462531; +} + + +message LDAPConfigTestResult { + // Additional details for error cases + string details = 389430416; + // Array of issues/considerations about the result + repeated LDAPConfigTestIssue issues = 294789969; + // Short human readable test about the result + string message = 477462531; + // Test status code: always 'success' or 'error' + string status = 446240775; + // A more detailed trace of incremental results during auth tests + string trace = 110620997; + LDAPUser user = 3599307; + // Link to ldap config + string url = 116079; +} + + +message LDAPGroupRead { + // Unique Id + int64 id = 3355; + // Unique Id of group in Looker + int64 looker_group_id = 304537367; + // Name of group in Looker + string looker_group_name = 301354167; + // Name of group in LDAP + string name = 3373707; + // Looker Roles + repeated Role roles = 108695229; + // Link to ldap config + string url = 116079; +} + + +message LDAPGroupWrite { + // Unique Id + int64 id = 3355; + // Unique Id of group in Looker + int64 looker_group_id = 304537367; + // Name of group in Looker + string looker_group_name = 301354167; + // Name of group in LDAP + string name = 3373707; + // Looker Role Ids + repeated int64 role_ids = 266265745; + // Link to ldap config + string url = 116079; +} + + +message LDAPUser { + // Array of user's email addresses and aliases for use in migration + repeated string all_emails = 283783469; + // Dictionary of user's attributes (name/value) + map attributes = 405645655; + // Primary email address + string email = 96619420; + // First name + string first_name = 160985414; + // Array of user's groups (group names only) + repeated string groups = 309365131; + // Last Name + string last_name = 503280549; + // LDAP's distinguished name for the user record + string ldap_dn = 22077474; + // LDAP's Unique ID for the user + string ldap_id = 22077619; + // Array of user's roles (role names only) + repeated string roles = 108695229; + // Link to ldap config + string url = 116079; +} + + +message LDAPUserAttributeRead { + // Name of User Attribute in LDAP + string name = 3373707; + // Required to be in LDAP assertion for login to be allowed to succeed + bool required = 393139297; + // Looker User Attributes + repeated UserAttribute user_attributes = 435197330; + // Link to ldap config + string url = 116079; +} + + +message LDAPUserAttributeWrite { + // Name of User Attribute in LDAP + string name = 3373707; + // Required to be in LDAP assertion for login to be allowed to succeed + bool required = 393139297; + // Looker User Attribute Ids + repeated int64 user_attribute_ids = 468967351; + // Link to ldap config + string url = 116079; +} + + +message LegacyFeature { + // Operations the current user is able to perform on this object + map can = 98256; + // Unique Id + string id = 3355; + // Name + string name = 3373707; + // Description + string description = 431136513; + // Whether this feature has been enabled by a user + bool enabled_locally = 472119345; + // Whether this feature is currently enabled + bool enabled = 402398511; + // Looker version where this feature became a legacy feature + string disallowed_as_of_version = 388206984; + // Looker version where this feature will be automatically disabled + string disable_on_upgrade_to_version = 357209432; + // Future Looker version where this feature will be removed + string end_of_life_version = 269176985; + // URL for documentation about this 
feature + string documentation_url = 282180250; + // Approximate date that this feature will be automatically disabled. + google.protobuf.Timestamp approximate_disable_date = 435078408; + // Approximate date that this feature will be removed. + google.protobuf.Timestamp approximate_end_of_life_date = 413188266; + // Whether this legacy feature may have been automatically disabled when upgrading to the current version. + bool has_disabled_on_upgrade = 465129201; +} + +// Name of the command Valid values are: "dashboard", "lookml_dashboard". +enum LinkedContentType { + _LINKED_CONTENT_TYPE_UNSET = 0; + LINKED_CONTENT_TYPE_DASHBOARD = 278150833; + LINKED_CONTENT_TYPE_LOOKML_DASHBOARD = 441682131; +} + + +message Locale { + // Code for Locale + string code = 3059181; + // Name of Locale in its own language + string native_name = 437737068; + // Name of Locale in English + string english_name = 277569798; +} + + +message LocalizationSettings { + // Default locale for localization + string default_locale = 487618182; + // Localization level - strict or permissive + string localization_level = 337718167; +} + + +message Look { + // Operations the current user is able to perform on this object + map can = 98256; + // Id of content metadata + int64 content_metadata_id = 293222822; + // Unique Id + int64 id = 3355; + // Look Title + string title = 110371416; + // User Id + int64 user_id = 147132913; + // Content Favorite Id + int64 content_favorite_id = 403544586; + // Time that the Look was created. + google.protobuf.Timestamp created_at = 342420026; + // Whether or not a look is 'soft' deleted. + bool deleted = 387615750; + // Time that the Look was deleted. + google.protobuf.Timestamp deleted_at = 441264006; + // Id of User that deleted the look. + int64 deleter_id = 441368332; + // Description + string description = 431136513; + // Embed Url + string embed_url = 352154685; + // Excel File Url + string excel_file_url = 493227475; + // Number of times favorited + int64 favorite_count = 358762965; + // Google Spreadsheet Formula + string google_spreadsheet_formula = 434476859; + // Image Embed Url + string image_embed_url = 469465270; + // auto-run query when Look viewed + bool is_run_on_load = 494569088; + // Time that the Look was last accessed by any user + google.protobuf.Timestamp last_accessed_at = 391609649; + // Id of User that last updated the look. + int64 last_updater_id = 328160870; + // Time last viewed in the Looker web UI + google.protobuf.Timestamp last_viewed_at = 273134349; + LookModel model = 104069929; + // Is Public + bool public = 488711883; + // Public Slug + string public_slug = 270024015; + // Public Url + string public_url = 476205734; + // Query Id + int64 query_id = 291663619; + // Short Url + string short_url = 506923317; + FolderBase folder = 317241572; + // Folder Id + string folder_id = 527488652; + // Time that the Look was updated. 
+ google.protobuf.Timestamp updated_at = 295464393; + // Number of times viewed in the Looker web UI + int64 view_count = 383588418; +} + + +message LookBasic { + // Operations the current user is able to perform on this object + map can = 98256; + // Id of content metadata + int64 content_metadata_id = 293222822; + // Unique Id + int64 id = 3355; + // Look Title + string title = 110371416; + // User Id + int64 user_id = 147132913; +} + + +message LookmlModel { + // Operations the current user is able to perform on this object + map can = 98256; + // Array of names of connections this model is allowed to use + repeated string allowed_db_connection_names = 482839655; + // Array of explores (if has_content) + repeated LookmlModelNavExplore explores = 482224624; + // Does this model declaration have have lookml content? + bool has_content = 456150261; + // UI-friendly name for this model + string label = 102727412; + // Name of the model. Also used as the unique identifier + string name = 3373707; + // Name of project containing the model + string project_name = 469694456; + // Is this model allowed to use all current and future connections + bool unlimited_db_connections = 419500594; +} + + +message LookmlModelExplore { + // Fully qualified explore name (model name plus explore name) + string id = 3355; + // Explore name + string name = 3373707; + // Description + string description = 431136513; + // Label + string label = 102727412; + // Explore title + string title = 110371416; + // Scopes + repeated string scopes = 453884336; + // Can Total + bool can_total = 376021525; + // Can Develop LookML + bool can_develop = 510023049; + // Can See LookML + bool can_see_lookml = 272702243; + // A URL linking to the definition of this explore in the LookML IDE. + string lookml_link = 315140153; + // Can Save + bool can_save = 126460788; + // Can Explain + bool can_explain = 406068104; + // Can pivot in the DB + bool can_pivot_in_db = 81932716; + // Can use subtotals + bool can_subtotal = 427786093; + // Has timezone support + bool has_timezone_support = 438214435; + // Cost estimates supported + bool supports_cost_estimate = 499263120; + // Connection name + string connection_name = 365832102; + // How nulls are sorted, possible values are "low", "high", "first" and "last" + string null_sort_treatment = 345326183; + // List of model source files + repeated string files = 97434231; + // Primary source_file file + string source_file = 85057760; + // Name of project + string project_name = 469694456; + // Name of model + string model_name = 526217848; + // Name of view + string view_name = 393300486; + // Is hidden + bool hidden = 304371861; + // A sql_table_name expression that defines what sql table the view/explore maps onto. Example: "prod_orders2 AS orders" in a view named orders. + string sql_table_name = 459754643; + // (DEPRECATED) Array of access filter field names + repeated string access_filter_fields = 472279153; + // Access filters + repeated LookmlModelExploreAccessFilter access_filters = 494874128; + // Aliases + repeated LookmlModelExploreAlias aliases = 457267329; + // Always filter + repeated LookmlModelExploreAlwaysFilter always_filter = 506242538; + // Conditionally filter + repeated LookmlModelExploreConditionallyFilter conditionally_filter = 467462830; + // Array of index fields + repeated string index_fields = 349446854; + // Sets + repeated LookmlModelExploreSet sets = 3526737; + // An array of arbitrary string tags provided in the model for this explore. 
+ repeated string tags = 3552281; + // Errors + repeated LookmlModelExploreError errors = 323658789; + LookmlModelExploreFieldset fields = 318677073; + // Views joined into this explore + repeated LookmlModelExploreJoins joins = 101304457; + // Label used to group explores in the navigation menus + string group_label = 273739597; + // An array of items describing which custom measure types are supported for creating a custom measure 'based_on' each possible dimension type. + repeated LookmlModelExploreSupportedMeasureType supported_measure_types = 277815363; +} + + +message LookmlModelExploreAccessFilter { + // Field to be filtered + string field = 97427706; + // User attribute name + string user_attribute = 395043786; +} + + +message LookmlModelExploreAlias { + // Name + string name = 3373707; + // Value + string value = 111972721; +} + + +message LookmlModelExploreAlwaysFilter { + // Name + string name = 3373707; + // Value + string value = 111972721; +} + + +message LookmlModelExploreConditionallyFilter { + // Name + string name = 3373707; + // Value + string value = 111972721; +} + + +message LookmlModelExploreError { + // Error Message + string message = 477462531; + // Details + google.protobuf.Any details = 389430416; + // Error source location + string error_pos = 329871261; + // Is this a field error + bool field_error = 344381761; +} + + +message LookmlModelExploreField { + // The appropriate horizontal text alignment the values of this field should be displayed in. Valid values are: "left", "right". + Align align = 92903173; + // Whether it's possible to filter on this field. + bool can_filter = 408701006; + // Field category Valid values are: "parameter", "filter", "measure", "dimension". + Category category = 50511102; + // The default value that this field uses when filtering. Null if there is no default value. + string default_filter_value = 251606232; + // Description + string description = 431136513; + // An array enumerating all the possible values that this field can contain. When null, there is no limit to the set of possible values this field can contain. + repeated LookmlModelExploreFieldEnumeration enumerations = 8797964; + // An error message indicating a problem with the definition of this field. If there are no errors, this will be null. + string error = 96784904; + // A label creating a grouping of fields. All fields with this label should be presented together when displayed in a UI. + string field_group_label = 348076708; + // When presented in a field group via field_group_label, a shorter name of the field to be displayed in that context. + string field_group_variant = 493011696; + // The style of dimension fill that is possible for this field. Null if no dimension fill is possible. Valid values are: "enumeration", "range". + FillStyle fill_style = 233043979; + // An offset (in months) from the calendar start month to the fiscal start month defined in the LookML model this field belongs to. + int64 fiscal_month_offset = 496032004; + // Whether this field has a set of allowed_values specified in LookML. + bool has_allowed_values = 372703215; + // Whether this field should be hidden from the user interface. + bool hidden = 304371861; + // Whether this field is a filter. + bool is_filter = 405226473; + // Whether this field represents a fiscal time value. + bool is_fiscal = 405130438; + // Whether this field is of a type that represents a numeric value. + bool is_numeric = 500103944; + // Whether this field is of a type that represents a time value. 
+ bool is_timeframe = 317011333;
+ // Whether this field can be time filtered.
+ bool can_time_filter = 467147362;
+ LookmlModelExploreFieldTimeInterval time_interval = 343756027;
+ // Fully-qualified human-readable label of the field.
+ string label = 102727412;
+ // The name of the parameter that will provide a parameterized label for this field, if available in the current context.
+ string label_from_parameter = 295803007;
+ // The human-readable label of the field, without the view label.
+ string label_short = 404447089;
+ // A URL linking to the definition of this field in the LookML IDE.
+ string lookml_link = 315140153;
+ LookmlModelExploreFieldMapLayer map_layer = 312138283;
+ // Whether this field is a measure.
+ bool measure = 469160623;
+ // Fully-qualified name of the field.
+ string name = 3373707;
+ // If yes, the field will not be localized with the user attribute number_format. Defaults to no
+ bool strict_value_format = 421377265;
+ // Whether this field is a parameter.
+ bool parameter = 488615146;
+ // Whether this field can be removed from a query.
+ bool permanent = 334244439;
+ // Whether or not the field represents a primary key.
+ bool primary_key = 433617583;
+ // The name of the project this field is defined in.
+ string project_name = 469694456;
+ // When true, it's not possible to re-sort this field's values without re-running the SQL query, due to database logic that affects the sort.
+ bool requires_refresh_on_sort = 274825730;
+ // The LookML scope this field belongs to. The scope is typically the field's view.
+ string scope = 109264468;
+ // Whether this field can be sorted.
+ bool sortable = 415556350;
+ // The path portion of source_file_path.
+ string source_file = 85057760;
+ // The fully-qualified path of the project file this field is defined in.
+ string source_file_path = 435439374;
+ // SQL expression as defined in the LookML model. The SQL syntax shown here is a representation intended for auditability, and is not necessarily an exact match for what will ultimately be run in the database. It may contain special LookML syntax or annotations that are not valid SQL. This will be null if the current user does not have the see_lookml permission for the field's model.
+ string sql = 114126;
+ // An array of conditions and values that make up a SQL Case expression, as defined in the LookML model. The SQL syntax shown here is a representation intended for auditability, and is not necessarily an exact match for what will ultimately be run in the database. It may contain special LookML syntax or annotations that are not valid SQL. This will be null if the current user does not have the see_lookml permission for the field's model.
+ repeated LookmlModelExploreFieldSqlCase sql_case = 524422271;
+ // Array of filter conditions defined for the measure in LookML.
+ repeated LookmlModelExploreFieldMeasureFilters filters = 427273730;
+ // The name of the dimension to base suggest queries from.
+ string suggest_dimension = 473128650;
+ // The name of the explore to base suggest queries from.
+ string suggest_explore = 470265034;
+ // Whether or not suggestions are possible for this field.
+ bool suggestable = 381392712;
+ // If available, a list of suggestions for this field. For most fields, a suggest query is a more appropriate way to get an up-to-date list of suggestions. Or use enumerations to list all the possible values.
+ repeated string suggestions = 381329988;
+ // An array of arbitrary string tags provided in the model for this field.
+ repeated string tags = 3552281;
+ // The LookML type of the field.
+ string type = 3575610;
+ // An array of user attribute types that are allowed to be used in filters on this field. Valid values are: "advanced_filter_string", "advanced_filter_number", "advanced_filter_datetime", "string", "number", "datetime", "relative_url", "yesno", "zipcode".
+ repeated UserAttributeFilterTypes user_attribute_filter_types = 357567957;
+ // If specified, the LookML value format string for formatting values of this field.
+ string value_format = 443748997;
+ // The name of the view this field belongs to.
+ string view = 3619493;
+ // The human-readable label of the view the field belongs to.
+ string view_label = 381619401;
+ // Whether this field was specified in "dynamic_fields" and is not part of the model.
+ bool dynamic = 531191823;
+ // The name of the starting day of the week. Valid values are: "monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday".
+ WeekStartDay week_start_day = 516641811;
+ // The number of times this field has been used in queries
+ int64 times_used = 422530629;
+}
+
+
+message LookmlModelExploreFieldEnumeration {
+ // Label
+ string label = 102727412;
+ // Value
+ google.protobuf.Any value = 111972721;
+}
+
+
+message LookmlModelExploreFieldMapLayer {
+ // URL to the map layer resource.
+ string url = 116079;
+ // Name of the map layer, as defined in LookML.
+ string name = 3373707;
+ // Specifies the name of the TopoJSON object that the map layer references. If not specified, use the first object.
+ string feature_key = 376644202;
+ // Selects which property from the TopoJSON data to plot against. TopoJSON supports arbitrary metadata for each region. When null, the first matching property should be used.
+ string property_key = 432087941;
+ // Which property from the TopoJSON data to use to label the region. When null, property_key should be used.
+ string property_label_key = 284376789;
+ // The preferred geographic projection of the map layer when displayed in a visualization that supports multiple geographic projections.
+ string projection = 492490092;
+ // Specifies the data format of the region information. Valid values are: "topojson", "vector_tile_region".
+ Format format = 317194754;
+ // Specifies the URL to a JSON file that defines the geographic extents of each region available in the map layer. This data is used to automatically center the map on the available data for visualization purposes. The JSON file must be a JSON object where the keys are the mapping value of the feature (as specified by property_key) and the values are arrays of four numbers representing the west longitude, south latitude, east longitude, and north latitude extents of the region. The object must include a key for every possible value of property_key.
+ string extents_json_url = 393240412;
+ // The maximum zoom level that the map layer may be displayed at, for visualizations that support zooming.
+ int64 max_zoom_level = 355661940;
+ // The minimum zoom level that the map layer may be displayed at, for visualizations that support zooming.
+ int64 min_zoom_level = 519890953; +} + + +message LookmlModelExploreFieldMeasureFilters { + // Filter field name + string field = 97427706; + // Filter condition value + string condition = 430655858; +} + + +message LookmlModelExploreFieldset { + // Array of dimensions + repeated LookmlModelExploreField dimensions = 414334925; + // Array of measures + repeated LookmlModelExploreField measures = 488406165; + // Array of filters + repeated LookmlModelExploreField filters = 427273730; + // Array of parameters + repeated LookmlModelExploreField parameters = 458736106; +} + + +message LookmlModelExploreFieldSqlCase { + // SQL Case label value + string value = 111972721; + // SQL Case condition expression + string condition = 430655858; +} + + +message LookmlModelExploreFieldTimeInterval { + // The type of time interval this field represents a grouping of. Valid values are: "day", "hour", "minute", "second", "millisecond", "microsecond", "week", "month", "quarter", "year". + Name name = 3373707; + // The number of intervals this field represents a grouping of. + int64 count = 94851343; +} + + +message LookmlModelExploreJoins { + // Name of this join (and name of the view to join) + string name = 3373707; + // Fields referenced by the join + repeated string dependent_fields = 473159934; + // Fields of the joined view to pull into this explore + repeated string fields = 318677073; + // Name of the dimension in this explore whose value is in the primary key of the joined view + string foreign_key = 361959139; + // Name of view to join + string from = 3151786; + // Specifies whether all queries must use an outer join + bool outer_only = 436007740; + // many_to_one, one_to_one, one_to_many, many_to_many + string relationship = 261851592; + // Names of joins that must always be included in SQL queries + repeated string required_joins = 373655301; + // SQL expression that produces a foreign key + string sql_foreign_key = 439274945; + // SQL ON expression describing the join condition + string sql_on = 447472392; + // SQL table name to join + string sql_table_name = 459754643; + // The join type: left_outer, full_outer, inner, or cross + string type = 3575610; + // Label to display in UI selectors + string view_label = 381619401; +} + + +message LookmlModelExploreSet { + // Name + string name = 3373707; + // Value set + repeated string value = 111972721; +} + + +message LookmlModelExploreSupportedMeasureType { + string dimension_type = 236959117; + repeated string measure_types = 317184150; +} + + +message LookmlModelNavExplore { + // Name of the explore + string name = 3373707; + // Description for the explore + string description = 431136513; + // Label for the explore + string label = 102727412; + // Is this explore marked as hidden + bool hidden = 304371861; + // Label used to group explores in the navigation menus + string group_label = 273739597; +} + + +message LookmlTest { + // Operations the current user is able to perform on this object + map can = 98256; + // Name of model containing this test. + string model_name = 526217848; + // Name of this test. + string name = 3373707; + // Name of the explore this test runs a query against + string explore_name = 485574866; + // The url parameters that can be used to reproduce this test's query on an explore. + string query_url_params = 274768500; + // Name of the LookML file containing this test. + string file = 3143036; + // Line number of this test in LookML. 
+ int64 line = 3321844;
+}
+
+
+message LookmlTestResult {
+ // Operations the current user is able to perform on this object
+ map can = 98256;
+ // Name of model containing this test.
+ string model_name = 526217848;
+ // Name of this test.
+ string test_name = 295076218;
+ // Number of assertions in this test
+ int64 assertions_count = 363767167;
+ // Number of assertions failed in this test
+ int64 assertions_failed = 503064762;
+ // A list of any errors encountered by the test.
+ repeated ProjectError errors = 323658789;
+ // A list of any warnings encountered by the test.
+ repeated ProjectError warnings = 498091095;
+ // True if this test passed without errors.
+ bool success = 466792447;
+}
+
+
+message LookModel {
+ // Model Id
+ string id = 3355;
+ // Model Label
+ string label = 102727412;
+}
+
+
+message LookWithDashboards {
+ // Operations the current user is able to perform on this object
+ map can = 98256;
+ // Id of content metadata
+ int64 content_metadata_id = 293222822;
+ // Unique Id
+ int64 id = 3355;
+ // Look Title
+ string title = 110371416;
+ // User Id
+ int64 user_id = 147132913;
+ // Content Favorite Id
+ int64 content_favorite_id = 403544586;
+ // Time that the Look was created.
+ google.protobuf.Timestamp created_at = 342420026;
+ // Whether or not a look is 'soft' deleted.
+ bool deleted = 387615750;
+ // Time that the Look was deleted.
+ google.protobuf.Timestamp deleted_at = 441264006;
+ // Id of User that deleted the look.
+ int64 deleter_id = 441368332;
+ // Description
+ string description = 431136513;
+ // Embed Url
+ string embed_url = 352154685;
+ // Excel File Url
+ string excel_file_url = 493227475;
+ // Number of times favorited
+ int64 favorite_count = 358762965;
+ // Google Spreadsheet Formula
+ string google_spreadsheet_formula = 434476859;
+ // Image Embed Url
+ string image_embed_url = 469465270;
+ // auto-run query when Look viewed
+ bool is_run_on_load = 494569088;
+ // Time that the Look was last accessed by any user
+ google.protobuf.Timestamp last_accessed_at = 391609649;
+ // Id of User that last updated the look.
+ int64 last_updater_id = 328160870;
+ // Time last viewed in the Looker web UI
+ google.protobuf.Timestamp last_viewed_at = 273134349;
+ LookModel model = 104069929;
+ // Is Public
+ bool public = 488711883;
+ // Public Slug
+ string public_slug = 270024015;
+ // Public Url
+ string public_url = 476205734;
+ // Query Id
+ int64 query_id = 291663619;
+ // Short Url
+ string short_url = 506923317;
+ FolderBase folder = 317241572;
+ // Folder Id
+ string folder_id = 527488652;
+ // Time that the Look was updated.
+ google.protobuf.Timestamp updated_at = 295464393;
+ // Number of times viewed in the Looker web UI
+ int64 view_count = 383588418;
+ // Dashboards
+ repeated DashboardBase dashboards = 469015063;
+}
+
+
+message LookWithQuery {
+ // Operations the current user is able to perform on this object
+ map can = 98256;
+ // Id of content metadata
+ int64 content_metadata_id = 293222822;
+ // Unique Id
+ int64 id = 3355;
+ // Look Title
+ string title = 110371416;
+ // User Id
+ int64 user_id = 147132913;
+ // Content Favorite Id
+ int64 content_favorite_id = 403544586;
+ // Time that the Look was created.
+ google.protobuf.Timestamp created_at = 342420026;
+ // Whether or not a look is 'soft' deleted.
+ bool deleted = 387615750;
+ // Time that the Look was deleted.
+ google.protobuf.Timestamp deleted_at = 441264006;
+ // Id of User that deleted the look.
+ int64 deleter_id = 441368332; + // Description + string description = 431136513; + // Embed Url + string embed_url = 352154685; + // Excel File Url + string excel_file_url = 493227475; + // Number of times favorited + int64 favorite_count = 358762965; + // Google Spreadsheet Formula + string google_spreadsheet_formula = 434476859; + // Image Embed Url + string image_embed_url = 469465270; + // auto-run query when Look viewed + bool is_run_on_load = 494569088; + // Time that the Look was last accessed by any user + google.protobuf.Timestamp last_accessed_at = 391609649; + // Id of User that last updated the look. + int64 last_updater_id = 328160870; + // Time last viewed in the Looker web UI + google.protobuf.Timestamp last_viewed_at = 273134349; + LookModel model = 104069929; + // Is Public + bool public = 488711883; + // Public Slug + string public_slug = 270024015; + // Public Url + string public_url = 476205734; + // Query Id + int64 query_id = 291663619; + // Short Url + string short_url = 506923317; + FolderBase folder = 317241572; + // Folder Id + string folder_id = 527488652; + // Time that the Look was updated. + google.protobuf.Timestamp updated_at = 295464393; + // Number of times viewed in the Looker web UI + int64 view_count = 383588418; + Query query = 107944136; + // Url + string url = 116079; +} + + +message Manifest { + // Operations the current user is able to perform on this object + map can = 98256; + // Manifest project name + string name = 3373707; + // Imports for a project + repeated ImportedProject imports = 481509467; + LocalizationSettings localization_settings = 362233467; +} + + +message MergeFields { + // Field name to map onto in the merged results + string field_name = 288329560; + // Field name from the source query + string source_field_name = 375791445; +} + + +message MergeQuery { + // Operations the current user is able to perform on this object + map can = 98256; + // Column Limit + string column_limit = 114546286; + // Dynamic Fields + string dynamic_fields = 74327737; + // Unique Id + string id = 3355; + // Pivots + repeated string pivots = 493953479; + // Unique to get results + int64 result_maker_id = 347633462; + // Sorts + repeated string sorts = 109624981; + // Source Queries defining the results to be merged. 
+ repeated MergeQuerySourceQuery source_queries = 396289744; + // Total + bool total = 110549828; + // Visualization Config + map vis_config = 422614288; +} + + +message MergeQuerySourceQuery { + // An array defining which fields of the source query are mapped onto fields of the merge query + repeated MergeFields merge_fields = 483158960; + // Display name + string name = 3373707; + // Id of the query to merge + int64 query_id = 291663619; +} + + +message ModelFieldSuggestions { + // List of suggestions + repeated string suggestions = 381329988; + // Error message + string error = 96784904; + // True if result came from the cache + bool from_cache = 452407124; + // True if this was a hit limit + bool hit_limit = 502186499; + // True if calcite was used + bool used_calcite_materialization = 412821088; +} + + +message ModelSet { + // Operations the current user is able to perform on this object + map can = 98256; + bool all_access = 505719009; + bool built_in = 357544798; + // Unique Id + int64 id = 3355; + repeated string models = 534399691; + // Name of ModelSet + string name = 3373707; + // Link to get this item + string url = 116079; +} + + +message ModelsNotValidated { + // Model name + string name = 3373707; + // Project file + string project_file_id = 523157964; +} + +// The type of time interval this field represents a grouping of. Valid values are: "day", "hour", "minute", "second", "millisecond", "microsecond", "week", "month", "quarter", "year". +enum Name { + _NAME_UNSET = 0; + NAME_DAY = 417621262; + NAME_HOUR = 245296360; + NAME_MINUTE = 315400484; + NAME_SECOND = 399274068; + NAME_MILLISECOND = 456314708; + NAME_MICROSECOND = 158944356; + NAME_WEEK = 244859608; + NAME_MONTH = 495179222; + NAME_QUARTER = 490365750; + NAME_YEAR = 244800143; +} + + +message OauthClientApp { + // Operations the current user is able to perform on this object + map can = 98256; + // The globally unique id of this application + string client_guid = 174062979; + // The uri with which this application will receive an auth code by browser redirect. + string redirect_uri = 475615044; + // The application's display name + string display_name = 403771642; + // A description of the application that will be displayed to users + string description = 431136513; + // When enabled is true, OAuth2 and API requests will be accepted from this app. When false, all requests from this app will be refused. + bool enabled = 402398511; + // If set, only Looker users who are members of this group can use this web app with Looker. If group_id is not set, any Looker user may use this app to access this Looker instance + int64 group_id = 506361563; + // All auth codes, access tokens, and refresh tokens issued for this application prior to this date-time for ALL USERS will be invalid. + google.protobuf.Timestamp tokens_invalid_before = 348775418; + // All users who have been activated to use this app + repeated UserPublic activated_users = 331044486; +} + + +message OIDCConfig { + // Operations the current user is able to perform on this object + map can = 98256; + // Allow alternate email-based login via '/login/email' for admins and for specified users with the 'login_special_email' permission. This option is useful as a fallback during ldap setup, if ldap config problems occur later, or if you need to support some users who are not in your ldap directory. Looker email/password logins are always disabled for regular users when ldap is enabled. 
+ bool alternate_email_login_allowed = 332315627; + // OpenID Provider Audience + string audience = 487814402; + // Users will not be allowed to login at all unless a role for them is found in OIDC if set to true + bool auth_requires_role = 488630792; + // OpenID Provider Authorization Url + string authorization_endpoint = 535116198; + // (Write-Only) Array of ids of groups that will be applied to new users the first time they login via OIDC + repeated int64 default_new_user_group_ids = 91703103; + // (Read-only) Groups that will be applied to new users the first time they login via OIDC + repeated Group default_new_user_groups = 381653162; + // (Write-Only) Array of ids of roles that will be applied to new users the first time they login via OIDC + repeated int64 default_new_user_role_ids = 429566625; + // (Read-only) Roles that will be applied to new users the first time they login via OIDC + repeated Role default_new_user_roles = 435450942; + // Enable/Disable OIDC authentication for the server + bool enabled = 402398511; + // (Read-only) Array of mappings between OIDC Groups and Looker Roles + repeated OIDCGroupRead groups = 309365131; + // Name of user record attributes used to indicate groups. Used when 'groups_finder_type' is set to 'grouped_attribute_values' + string groups_attribute = 407121539; + // (Read/Write) Array of mappings between OIDC Groups and arrays of Looker Role ids + repeated OIDCGroupWrite groups_with_role_ids = 475300071; + // Relying Party Identifier (provided by OpenID Provider) + string identifier = 404608213; + // OpenID Provider Issuer + string issuer = 294789969; + // When this config was last modified + google.protobuf.Timestamp modified_at = 386047141; + // User id of user who last modified this config + int64 modified_by = 386047132; + // Merge first-time oidc login to existing user account by email addresses. When a user logs in for the first time via oidc this option will connect this user into their existing account by finding the account with a matching email address by testing the given types of credentials for existing users. Otherwise a new user account will be created for the user. This list (if provided) must be a comma separated list of string like 'email,ldap,google' + string new_user_migration_types = 348180790; + // Array of scopes to request. 
+ repeated string scopes = 453884336; + // (Write-Only) Relying Party Secret (provided by OpenID Provider) + string secret = 453138600; + // Set user roles in Looker based on groups from OIDC + bool set_roles_from_groups = 441288982; + // Slug to identify configurations that are created in order to run a OIDC config test + string test_slug = 295036274; + // OpenID Provider Token Url + string token_endpoint = 495238427; + // Name of user record attributes used to indicate email address field + string user_attribute_map_email = 384506455; + // Name of user record attributes used to indicate first name + string user_attribute_map_first_name = 401269830; + // Name of user record attributes used to indicate last name + string user_attribute_map_last_name = 506361433; + // (Read-only) Array of mappings between OIDC User Attributes and Looker User Attributes + repeated OIDCUserAttributeRead user_attributes = 435197330; + // (Read/Write) Array of mappings between OIDC User Attributes and arrays of Looker User Attribute ids + repeated OIDCUserAttributeWrite user_attributes_with_ids = 514666905; + // OpenID Provider User Information Url + string userinfo_endpoint = 146023451; + // Allow OIDC auth'd users to be members of non-reflected Looker groups. If 'false', user will be removed from non-reflected groups on login. + bool allow_normal_group_membership = 480346046; + // OIDC auth'd users will inherit roles from non-reflected Looker groups. + bool allow_roles_from_normal_groups = 302729383; + // Allows roles to be directly assigned to OIDC auth'd users. + bool allow_direct_roles = 315512839; + // Link to get this item + string url = 116079; +} + + +message OIDCGroupRead { + // Unique Id + int64 id = 3355; + // Unique Id of group in Looker + int64 looker_group_id = 304537367; + // Name of group in Looker + string looker_group_name = 301354167; + // Name of group in OIDC + string name = 3373707; + // Looker Roles + repeated Role roles = 108695229; +} + + +message OIDCGroupWrite { + // Unique Id + int64 id = 3355; + // Unique Id of group in Looker + int64 looker_group_id = 304537367; + // Name of group in Looker + string looker_group_name = 301354167; + // Name of group in OIDC + string name = 3373707; + // Looker Role Ids + repeated int64 role_ids = 266265745; +} + + +message OIDCUserAttributeRead { + // Name of User Attribute in OIDC + string name = 3373707; + // Required to be in OIDC assertion for login to be allowed to succeed + bool required = 393139297; + // Looker User Attributes + repeated UserAttribute user_attributes = 435197330; +} + + +message OIDCUserAttributeWrite { + // Name of User Attribute in OIDC + string name = 3373707; + // Required to be in OIDC assertion for login to be allowed to succeed + bool required = 393139297; + // Looker User Attribute Ids + repeated int64 user_attribute_ids = 468967351; +} + + +message PasswordConfig { + // Operations the current user is able to perform on this object + map can = 98256; + // Minimum number of characters required for a new password. 
Must be between 7 and 100 + int64 min_length = 355788614; + // Require at least one numeric character + bool require_numeric = 381969292; + // Require at least one uppercase and one lowercase letter + bool require_upperlower = 533074595; + // Require at least one special character + bool require_special = 379956471; +} + + +message Permission { + // Operations the current user is able to perform on this object + map can = 98256; + // Permission symbol + string permission = 517618225; + // Dependency parent symbol + string parent = 497712043; + // Description + string description = 431136513; +} + + +message PermissionSet { + // Operations the current user is able to perform on this object + map can = 98256; + bool all_access = 505719009; + bool built_in = 357544798; + // Unique Id + int64 id = 3355; + // Name of PermissionSet + string name = 3373707; + repeated string permissions = 283426081; + // Link to get this item + string url = 116079; +} + +// Type of permission: "view" or "edit" Valid values are: "view", "edit". +enum PermissionType { + _PERMISSION_TYPE_UNSET = 0; + PERMISSION_TYPE_VIEW = 370880998; + PERMISSION_TYPE_EDIT = 371392129; +} + + +message Project { + // Operations the current user is able to perform on this object + map can = 98256; + // Project Id + string id = 3355; + // Project display name + string name = 3373707; + // If true the project is configured with a git repository + bool uses_git = 265231137; + // Git remote repository url + string git_remote_url = 337946079; + // Git username for HTTPS authentication. (For production only, if using user attributes.) + string git_username = 393319598; + // (Write-Only) Git password for HTTPS authentication. (For production only, if using user attributes.) + string git_password = 348030004; + // User attribute name for username in per-user HTTPS authentication. + string git_username_user_attribute = 430560495; + // User attribute name for password in per-user HTTPS authentication. + string git_password_user_attribute = 173008929; + // Name of the git service provider + string git_service_name = 481280695; + // Port that HTTP(S) application server is running on (for PRs, file browsing, etc.) + int64 git_application_server_http_port = 478877938; + // Scheme that is running on application server (for PRs, file browsing, etc.) Valid values are: "http", "https". + GitApplicationServerHttpScheme git_application_server_http_scheme = 452168167; + // (Write-Only) Optional secret token with which to authenticate requests to the webhook deploy endpoint. If not set, endpoint is unauthenticated. + string deploy_secret = 451554436; + // (Write-Only) When true, unsets the deploy secret to allow unauthenticated access to the webhook deploy endpoint. + bool unset_deploy_secret = 465427100; + // The git pull request policy for this project. Valid values are: "off", "links", "recommended", "required". + PullRequestMode pull_request_mode = 421135683; + // Validation policy: If true, the project must pass validation checks before project changes can be committed to the git repository + bool validation_required = 427509330; + // If true, advanced git release management is enabled for this project + bool git_release_mgmt_enabled = 529764014; + // Validation policy: If true, the project can be committed with warnings when `validation_required` is true. (`allow_warnings` does nothing if `validation_required` is false). 
+ bool allow_warnings = 320266972; + // If true the project is an example project and cannot be modified + bool is_example = 177339221; + // Status of dependencies in your manifest & lockfile + string dependency_status = 50086042; +} + + +message ProjectError { + // A stable token that uniquely identifies this class of error, ignoring parameter values. Error message text may vary due to parameters or localization, but error codes do not. For example, a "File not found" error will have the same error code regardless of the filename in question or the user's display language + string code = 3059181; + // Severity: fatal, error, warning, info, success + string severity = 369575103; + // Error classification: syntax, deprecation, model_configuration, etc + string kind = 3292052; + // Error message which may contain information such as dashboard or model names that may be considered sensitive in some use cases. Avoid storing or sending this message outside of Looker + string message = 477462531; + // The field associated with this error + string field_name = 288329560; + // Name of the file containing this error + string file_path = 329102014; + // Line number in the file of this error + int64 line_number = 396496372; + // The model associated with this error + string model_id = 309519111; + // The explore associated with this error + string explore = 327287131; + // A link to Looker documentation about this error + string help_url = 394715719; + // Error parameters + map params = 497713981; + // A version of the error message that does not contain potentially sensitive information. Suitable for situations in which messages are stored or sent to consumers outside of Looker, such as external logs. Sanitized messages will display "(?)" where sensitive information would appear in the corresponding non-sanitized message + string sanitized_message = 454942833; +} + + +message ProjectFile { + // Operations the current user is able to perform on this object + map can = 98256; + // An opaque token uniquely identifying a file within a project. Avoid parsing or decomposing the text of this token. This token is stable within a Looker release but may change between Looker releases + string id = 3355; + // Path, file name, and extension of the file relative to the project root directory + string path = 3433509; + // Display name + string title = 110371416; + // File type: model, view, etc + string type = 3575610; + // The extension of the file: .view.lkml, .model.lkml, etc + string extension = 306278880; + // File mime type + string mime_type = 196041627; + // State of editability for the file. 
+ bool editable = 400604057; + GitStatus git_status = 436663488; +} + + +message ProjectValidation { + // A list of project errors + repeated ProjectError errors = 323658789; + // A hash value computed from the project's current state + string project_digest = 530534826; + // A list of models which were not fully validated + repeated ModelsNotValidated models_not_validated = 470831011; + // Duration of project validation in seconds + float computation_time = 279797358; +} + + +message ProjectValidationCache { + // A list of project errors + repeated ProjectError errors = 323658789; + // A hash value computed from the project's current state + string project_digest = 530534826; + // A list of models which were not fully validated + repeated ModelsNotValidated models_not_validated = 470831011; + // Duration of project validation in seconds + float computation_time = 279797358; + // If true, the cached project validation results are no longer accurate because the project has changed since the cached results were calculated + bool stale = 109757337; +} + + +message ProjectWorkspace { + // Operations the current user is able to perform on this object + map can = 98256; + // The id of the project + string project_id = 492492543; + // The id of the local workspace containing the project files + string workspace_id = 394620993; + // The status of the local git directory + string git_status = 436663488; + // Git head revision name + string git_head = 307839627; + // Status of the dependencies in your project. Valid values are: "lock_optional", "lock_required", "lock_error", "install_none". + DependencyStatus dependency_status = 50086042; + GitBranch git_branch = 514941099; + // The lookml syntax used by all files in this project + string lookml_type = 315076713; +} + +// The git pull request policy for this project. Valid values are: "off", "links", "recommended", "required". +enum PullRequestMode { + _PULL_REQUEST_MODE_UNSET = 0; + PULL_REQUEST_MODE_OFF = 481298627; + PULL_REQUEST_MODE_LINKS = 331454081; + PULL_REQUEST_MODE_RECOMMENDED = 207710761; + PULL_REQUEST_MODE_REQUIRED = 356618951; +} + + +message Query { + // Operations the current user is able to perform on this object + map can = 98256; + // Unique Id + int64 id = 3355; + // Model + string model = 104069929; + // Explore Name + string view = 3619493; + // Fields + repeated string fields = 318677073; + // Pivots + repeated string pivots = 493953479; + // Fill Fields + repeated string fill_fields = 491318906; + // Filters + map filters = 427273730; + // Filter Expression + string filter_expression = 294547567; + // Sorting for the query results. Use the format `["view.field", ...]` to sort on fields in ascending order. Use the format `["view.field desc", ...]` to sort on fields in descending order. Use `["__UNSORTED__"]` (2 underscores before and after) to disable sorting entirely. Empty sorts `[]` will trigger a default sort. + repeated string sorts = 109624981; + // Limit + string limit = 102976443; + // Column Limit + string column_limit = 114546286; + // Total + bool total = 110549828; + // Raw Total + string row_total = 339028775; + // Fields on which to run subtotals + repeated string subtotals = 277302775; + // Visualization configuration properties. These properties are typically opaque and differ based on the type of visualization used. There is no specified set of allowed keys. The values can be any type supported by JSON. 
A "type" key with a string value is often present, and is used by Looker to determine which visualization to present. Visualizations ignore unknown vis_config properties. + map vis_config = 422614288; + // The filter_config represents the state of the filter UI on the explore page for a given query. When running a query via the Looker UI, this parameter takes precedence over "filters". When creating a query or modifying an existing query, "filter_config" should be set to null. Setting it to any other value could cause unexpected filtering behavior. The format should be considered opaque. + map filter_config = 505106699; + // Visible UI Sections + string visible_ui_sections = 328103627; + // Slug + string slug = 3533483; + // Dynamic Fields + string dynamic_fields = 74327737; + // Client Id: used to generate shortened explore URLs. If set by client, must be a unique 22 character alphanumeric string. Otherwise one will be generated. + string client_id = 476022396; + // Share Url + string share_url = 395423932; + // Expanded Share Url + string expanded_share_url = 427685755; + // Expanded Url + string url = 116079; + // Query Timezone + string query_timezone = 202424720; + // Has Table Calculations + bool has_table_calculations = 464917416; +} + + +message QueryTask { + // Operations the current user is able to perform on this object + map can = 98256; + // Unique Id + string id = 3355; + // Id of query + int64 query_id = 291663619; + Query query = 107944136; + // whether or not to generate links in the query response. + bool generate_links = 443594980; + // Use production models to run query (even is user is in dev mode). + bool force_production = 378049187; + // Prefix to use for drill links. + string path_prefix = 525914619; + // Whether or not to use the cache + bool cache = 94416770; + // Whether or not to run table calculations on the server + bool server_table_calcs = 334030724; + // Retrieve any results from cache even if the results have expired. + bool cache_only = 28936777; + // cache key used to cache query. + string cache_key = 276629999; + // Status of query task. + string status = 446240775; + // Source of query task. + string source = 448252914; + // Runtime of prior queries. + float runtime = 387740662; + // Rebuild PDTS used in query. + bool rebuild_pdts = 256532759; + // Source of the results of the query. + string result_source = 380511937; + // Id of look associated with query. + int64 look_id = 349778619; + // Id of dashboard associated with query. + string dashboard_id = 496187565; + // The data format of the query results. + string result_format = 283324265; +} + + +message RenderTask { + // Operations the current user is able to perform on this object + map can = 98256; + // Date/Time render task was created + string created_at = 342420026; + // Filter values to apply to the dashboard queries, in URL query format + string dashboard_filters = 454602920; + // Id of dashboard to render + int64 dashboard_id = 496187565; + // Dashboard layout style: single_column or tiled + string dashboard_style = 394441966; + // Date/Time render task was completed + string finalized_at = 388292413; + // Output height in pixels. Flowed layouts may ignore this value. 
+ int64 height = 305257398; + // Id of this render task + string id = 3355; + // Id of look to render + int64 look_id = 349778619; + // Id of lookml dashboard to render + string lookml_dashboard_id = 339184839; + // Id of query to render + int64 query_id = 291663619; + // Number of seconds elapsed running queries + double query_runtime = 336843224; + // Number of seconds elapsed rendering data + double render_runtime = 497365131; + // Output format: pdf, png, or jpg + string result_format = 283324265; + // Total seconds elapsed for render task + double runtime = 387740662; + // Render task status: enqueued_for_query, querying, enqueued_for_render, rendering, success, failure + string status = 446240775; + // Additional information about the current status + string status_detail = 522482767; + // The user account permissions in which the render task will execute + int64 user_id = 147132913; + // Output width in pixels + int64 width = 113126854; +} + + +message RepositoryCredential { + // Operations the current user is able to perform on this object + map can = 98256; + // Unique Id + string id = 3355; + // Root project Id + string root_project_id = 379036192; + // Git remote repository url + string remote_url = 520826107; + // Git username for HTTPS authentication. + string git_username = 393319598; + // (Write-Only) Git password for HTTPS authentication. + string git_password = 348030004; + // Public deploy key for SSH authentication. + string ssh_public_key = 461638024; + // Whether the credentials have been configured for the Git Repository. + bool is_configured = 289271523; +} + +// Desired async query result format. Valid values are: "inline_json", "json", "json_detail", "json_fe", "csv", "html", "md", "txt", "xlsx", "gsxml". +enum ResultFormat { + _RESULT_FORMAT_UNSET = 0; + RESULT_FORMAT_INLINE_JSON = 466622342; + RESULT_FORMAT_JSON = 329557308; + RESULT_FORMAT_JSON_DETAIL = 397990225; + RESULT_FORMAT_JSON_FE = 453484516; + RESULT_FORMAT_CSV = 495548224; + RESULT_FORMAT_HTML = 329571979; + RESULT_FORMAT_MD = 516886369; + RESULT_FORMAT_TXT = 495544101; + RESULT_FORMAT_XLSX = 329454688; + RESULT_FORMAT_GSXML = 520451209; +} + + +message ResultMakerFilterables { + // The model this filterable comes from (used for field suggestions). + string model = 104069929; + // The view this filterable comes from (used for field suggestions). + string view = 3619493; + // The name of the filterable thing (Query or Merged Results). + string name = 3373707; + // array of dashboard_filter_name: and field: objects. + repeated ResultMakerFilterablesListen listen = 275627150; +} + + +message ResultMakerFilterablesListen { + // The name of a dashboard filter to listen to. + string dashboard_filter_name = 270130631; + // The name of the field in the filterable to filter with the value of the dashboard filter. + string field = 97427706; +} + + +message ResultMakerWithIdVisConfigAndDynamicFields { + // Unique Id. + int64 id = 3355; + // JSON string of dynamic field information. + string dynamic_fields = 74327737; + // array of items that can be filtered and information about them. + repeated ResultMakerFilterables filterables = 444310799; + // Sorts of the constituent Look, Query, or Merge Query + repeated string sorts = 109624981; + // ID of merge result if this is a merge_result. + string merge_result_id = 374234034; + // Total of the constituent Look, Query, or Merge Query + bool total = 110549828; + // ID of query if this is a query. 
+ int64 query_id = 291663619; + // ID of SQL Query if this is a SQL Runner Query + string sql_query_id = 57188541; + Query query = 107944136; + // Vis config of the constituent Query, or Merge Query. + map vis_config = 422614288; +} + + +message Role { + // Operations the current user is able to perform on this object + map can = 98256; + // Unique Id + int64 id = 3355; + // Name of Role + string name = 3373707; + PermissionSet permission_set = 359445379; + // (Write-Only) Id of permission set + int64 permission_set_id = 379867460; + ModelSet model_set = 502576493; + // (Write-Only) Id of model set + int64 model_set_id = 1141778; + // Link to get this item + string url = 116079; + // Link to get list of users with this role + string users_url = 357383864; +} + + +message RunningQueries { + // Operations the current user is able to perform on this object + map can = 98256; + // Unique Id + int64 id = 3355; + UserPublic user = 3599307; + Query query = 107944136; + SqlQuery sql_query = 464767429; + LookBasic look = 3327647; + // Date/Time Query was initiated + string created_at = 342420026; + // Date/Time Query was completed + string completed_at = 409546073; + // Query Id + string query_id = 291663619; + // Source (look, dashboard, queryrunner, explore, etc.) + string source = 448252914; + // Node Id + string node_id = 528612126; + // Slug + string slug = 3533483; + // ID of a Query Task + string query_task_id = 435207576; + // Cache Key + string cache_key = 276629999; + // Connection + string connection_name = 365832102; + // Dialect + string dialect = 413753737; + // Connection ID + string connection_id = 513204708; + // Additional Information(Error message or verbose status) + string message = 477462531; + // Status description + string status = 446240775; + // Number of seconds elapsed running the Query + double runtime = 387740662; + // SQL text of the query as run + string sql = 114126; +} + + +message SamlConfig { + // Operations the current user is able to perform on this object + map can = 98256; + // Enable/Disable Saml authentication for the server + bool enabled = 402398511; + // Identity Provider Certificate (provided by IdP) + string idp_cert = 164795758; + // Identity Provider Url (provided by IdP) + string idp_url = 416975417; + // Identity Provider Issuer (provided by IdP) + string idp_issuer = 360328317; + // Identity Provider Audience (set in IdP config). Optional in Looker. Set this only if you want Looker to validate the audience value returned by the IdP. + string idp_audience = 473436179; + // Count of seconds of clock drift to allow when validating timestamps of assertions. + int64 allowed_clock_drift = 494276104; + // Name of user record attributes used to indicate email address field + string user_attribute_map_email = 384506455; + // Name of user record attributes used to indicate first name + string user_attribute_map_first_name = 401269830; + // Name of user record attributes used to indicate last name + string user_attribute_map_last_name = 506361433; + // Merge first-time saml login to existing user account by email addresses. When a user logs in for the first time via saml this option will connect this user into their existing account by finding the account with a matching email address by testing the given types of credentials for existing users. Otherwise a new user account will be created for the user. 
This list (if provided) must be a comma separated list of string like 'email,ldap,google' + string new_user_migration_types = 348180790; + // Allow alternate email-based login via '/login/email' for admins and for specified users with the 'login_special_email' permission. This option is useful as a fallback during ldap setup, if ldap config problems occur later, or if you need to support some users who are not in your ldap directory. Looker email/password logins are always disabled for regular users when ldap is enabled. + bool alternate_email_login_allowed = 332315627; + // Slug to identify configurations that are created in order to run a Saml config test + string test_slug = 295036274; + // When this config was last modified + string modified_at = 386047141; + // User id of user who last modified this config + string modified_by = 386047132; + // (Read-only) Roles that will be applied to new users the first time they login via Saml + repeated Role default_new_user_roles = 435450942; + // (Read-only) Groups that will be applied to new users the first time they login via Saml + repeated Group default_new_user_groups = 381653162; + // (Write-Only) Array of ids of roles that will be applied to new users the first time they login via Saml + repeated int64 default_new_user_role_ids = 429566625; + // (Write-Only) Array of ids of groups that will be applied to new users the first time they login via Saml + repeated int64 default_new_user_group_ids = 91703103; + // Set user roles in Looker based on groups from Saml + bool set_roles_from_groups = 441288982; + // Name of user record attributes used to indicate groups. Used when 'groups_finder_type' is set to 'grouped_attribute_values' + string groups_attribute = 407121539; + // (Read-only) Array of mappings between Saml Groups and Looker Roles + repeated SamlGroupRead groups = 309365131; + // (Read/Write) Array of mappings between Saml Groups and arrays of Looker Role ids + repeated SamlGroupWrite groups_with_role_ids = 475300071; + // Users will not be allowed to login at all unless a role for them is found in Saml if set to true + bool auth_requires_role = 488630792; + // (Read-only) Array of mappings between Saml User Attributes and Looker User Attributes + repeated SamlUserAttributeRead user_attributes = 435197330; + // (Read/Write) Array of mappings between Saml User Attributes and arrays of Looker User Attribute ids + repeated SamlUserAttributeWrite user_attributes_with_ids = 514666905; + // Identifier for a strategy for how Looker will find groups in the SAML response. One of ['grouped_attribute_values', 'individual_attributes'] + string groups_finder_type = 336365796; + // Value for group attribute used to indicate membership. Used when 'groups_finder_type' is set to 'individual_attributes' + string groups_member_value = 400280826; + // Bypass the login page when user authentication is required. Redirect to IdP immediately instead. + bool bypass_login_page = 349317458; + // Allow SAML auth'd users to be members of non-reflected Looker groups. If 'false', user will be removed from non-reflected groups on login. + bool allow_normal_group_membership = 480346046; + // SAML auth'd users will inherit roles from non-reflected Looker groups. + bool allow_roles_from_normal_groups = 302729383; + // Allows roles to be directly assigned to SAML auth'd users. 
+ bool allow_direct_roles = 315512839; + // Link to get this item + string url = 116079; +} + + +message SamlGroupRead { + // Unique Id + int64 id = 3355; + // Unique Id of group in Looker + int64 looker_group_id = 304537367; + // Name of group in Looker + string looker_group_name = 301354167; + // Name of group in Saml + string name = 3373707; + // Looker Roles + repeated Role roles = 108695229; + // Link to saml config + string url = 116079; +} + + +message SamlGroupWrite { + // Unique Id + int64 id = 3355; + // Unique Id of group in Looker + int64 looker_group_id = 304537367; + // Name of group in Looker + string looker_group_name = 301354167; + // Name of group in Saml + string name = 3373707; + // Looker Role Ids + repeated int64 role_ids = 266265745; + // Link to saml config + string url = 116079; +} + + +message SamlMetadataParseResult { + // Operations the current user is able to perform on this object + map can = 98256; + // Identify Provider Issuer + string idp_issuer = 360328317; + // Identify Provider Url + string idp_url = 416975417; + // Identify Provider Certificate + string idp_cert = 164795758; +} + + +message SamlUserAttributeRead { + // Name of User Attribute in Saml + string name = 3373707; + // Required to be in Saml assertion for login to be allowed to succeed + bool required = 393139297; + // Looker User Attributes + repeated UserAttribute user_attributes = 435197330; + // Link to saml config + string url = 116079; +} + + +message SamlUserAttributeWrite { + // Name of User Attribute in Saml + string name = 3373707; + // Required to be in Saml assertion for login to be allowed to succeed + bool required = 393139297; + // Looker User Attribute Ids + repeated int64 user_attribute_ids = 468967351; + // Link to saml config + string url = 116079; +} + + +message ScheduledPlan { + // Name of this scheduled plan + string name = 3373707; + // User Id which owns this scheduled plan + int64 user_id = 147132913; + // Whether schedule is run as recipient (only applicable for email recipients) + bool run_as_recipient = 296373536; + // Whether the ScheduledPlan is enabled + bool enabled = 402398511; + // Id of a look + int64 look_id = 349778619; + // Id of a dashboard + int64 dashboard_id = 496187565; + // Id of a LookML dashboard + string lookml_dashboard_id = 339184839; + // Query string to run look or dashboard with + string filters_string = 109992149; + // (DEPRECATED) Alias for filters_string field + string dashboard_filters = 454602920; + // Delivery should occur if running the dashboard or look returns results + bool require_results = 330870556; + // Delivery should occur if the dashboard look does not return results + bool require_no_results = 420497737; + // Delivery should occur if data have changed since the last run + bool require_change = 381228813; + // Will run an unlimited query and send all results. 
+ bool send_all_results = 335604919; + // Vixie-Style crontab specification when to run + string crontab = 519088419; + // Name of a datagroup; if specified will run when datagroup triggered (can't be used with cron string) + string datagroup = 356040427; + // Timezone for interpreting the specified crontab (default is Looker instance timezone) + string timezone = 519056897; + // Query id + string query_id = 291663619; + // Scheduled plan destinations + repeated ScheduledPlanDestination scheduled_plan_destination = 354055597; + // Whether the plan in question should only be run once (usually for testing) + bool run_once = 407993818; + // Whether links back to Looker should be included in this ScheduledPlan + bool include_links = 386647463; + // The size of paper the PDF should be formatted to fit. Valid values are: "letter", "legal", "tabloid", "a0", "a1", "a2", "a3", "a4", "a5". + string pdf_paper_size = 491759120; + // Whether the PDF should be formatted for landscape orientation + bool pdf_landscape = 359176724; + // Whether this schedule is in an embed context or not + bool embed = 96620249; + // Color scheme of the dashboard if applicable + string color_theme = 331361403; + // Whether or not to expand table vis to full length + bool long_tables = 359736222; + // The pixel width at which we render the inline table visualizations + int64 inline_table_width = 484902747; + // Unique Id + int64 id = 3355; + // Date and time when ScheduledPlan was created + google.protobuf.Timestamp created_at = 342420026; + // Date and time when ScheduledPlan was last updated + google.protobuf.Timestamp updated_at = 295464393; + // Title + string title = 110371416; + UserPublic user = 3599307; + // When the ScheduledPlan will next run (null if running once) + google.protobuf.Timestamp next_run_at = 368958540; + // When the ScheduledPlan was last run + google.protobuf.Timestamp last_run_at = 502040332; + // Operations the current user is able to perform on this object + map can = 98256; +} + + +message ScheduledPlanDestination { + // Unique Id + int64 id = 3355; + // Id of a scheduled plan you own + int64 scheduled_plan_id = 335709463; + // The data format to send to the given destination. Supported formats vary by destination, but include: "txt", "csv", "inline_json", "json", "json_detail", "xlsx", "html", "wysiwyg_pdf", "assembled_pdf", "wysiwyg_png" + string format = 317194754; + // Are values formatted? (containing currency symbols, digit separators, etc. + bool apply_formatting = 400669803; + // Whether visualization options are applied to the results. + bool apply_vis = 518830860; + // Address for recipient. For email e.g. 'user@example.com'. For webhooks e.g. 'https://domain/path'. For Amazon S3 e.g. 's3://bucket-name/path/'. For SFTP e.g. 'sftp://host-name/path/'. + string address = 286923011; + // Whether the recipient is a Looker user on the current instance (only applicable for email recipients) + bool looker_recipient = 290777818; + // Type of the address ('email', 'webhook', 's3', or 'sftp') + string type = 3575610; + // JSON object containing parameters for external scheduling. For Amazon S3, this requires keys and values for access_key_id and region. For SFTP, this requires a key and value for username. + string parameters = 458736106; + // (Write-Only) JSON object containing secret parameters for external scheduling. For Amazon S3, this requires a key and value for secret_access_key. For SFTP, this requires a key and value for password. 
+ string secret_parameters = 529011017; + // Optional message to be included in scheduled emails + string message = 477462531; +} + + +message Schema { + // Schema name + string name = 3373707; + // True if this is the default schema + bool is_default = 312463349; +} + + +message SchemaColumn { + // Schema item name + string name = 3373707; + // Full name of item + string sql_escaped_name = 394857990; + // Name of schema + string schema_name = 505336523; + // SQL dialect data type + string data_type_database = 407848203; + // Data type + string data_type = 363359569; + // Looker data type + string data_type_looker = 463896804; + // SQL data type + string description = 431136513; + // Column data size + int64 column_size = 417385051; + // SQL Runner snippets for this connection + repeated Snippet snippets = 513815286; +} + + +message SchemaColumns { + // Schema item name + string name = 3373707; + // Full name of item + string sql_escaped_name = 394857990; + // Name of schema + string schema_name = 505336523; + // Columns for this schema + repeated SchemaColumn columns = 474860526; +} + + +message SchemaTable { + // Schema item name + string name = 3373707; + // Full name of item + string sql_escaped_name = 394857990; + // Name of schema + string schema_name = 505336523; + // Number of data rows + int64 rows = 3506649; + // External reference??? + string external = 455190285; + // SQL Runner snippets for connection + repeated Snippet snippets = 513815286; +} + + +message SchemaTables { + // Schema name + string name = 3373707; + // True if this is the default schema + bool is_default = 312463349; + // Tables for this schema + repeated SchemaTable tables = 440688845; +} + + +message Session { + // Operations the current user is able to perform on this object + map can = 98256; + // Unique Id + int64 id = 3355; + // IP address of user when this session was initiated + string ip_address = 370003511; + // User's browser type + string browser = 150940456; + // User's Operating System + string operating_system = 223060243; + // City component of user location (derived from IP address) + string city = 3053931; + // State component of user location (derived from IP address) + string state = 109757585; + // Country component of user location (derived from IP address) + string country = 478915531; + // Type of credentials used for logging in this session + string credentials_type = 525817688; + // Time when this session was last extended by the user + string extended_at = 345479555; + // Number of times this session was extended + int64 extended_count = 354296994; + // Actual user in the case when this session represents one user sudo'ing as another + int64 sudo_user_id = 287410032; + // Time when this session was initiated + string created_at = 342420026; + // Time when this session will expire + string expires_at = 416905585; + // Link to get this item + string url = 116079; +} + + +message SessionConfig { + // Operations the current user is able to perform on this object + map can = 98256; + // Allow users to have persistent sessions when they login + bool allow_persistent_sessions = 319921999; + // Number of minutes for user sessions. Must be between 5 and 43200 + int64 session_minutes = 457960434; + // Allow users to have an unbounded number of concurrent sessions (otherwise, users will be limited to only one session at a time). + bool unlimited_sessions_per_user = 344212172; + // Enforce session logout for sessions that are inactive for 15 minutes. 
+ bool use_inactivity_based_logout = 353744386; + // Track location of session when user logs in. + bool track_session_location = 531851756; +} + + +message Snippet { + // Name of the snippet + string name = 3373707; + // Label of the snippet + string label = 102727412; + // SQL text of the snippet + string sql = 114126; +} + + +message SqlQuery { + // Operations the current user is able to perform on this object + map can = 98256; + // The identifier of the SQL query + string slug = 3533483; + // Number of seconds this query took to run the most recent time it was run + float last_runtime = 531023051; + // Number of times this query has been run + int64 run_count = 485221797; + // Maximum number of rows this query will display on the SQL Runner page + int64 browser_limit = 377817351; + // SQL query text + string sql = 114126; + // The most recent time this query was run + string last_run_at = 502040332; + DBConnectionBase connection = 387825809; + // Model name this query uses + string model_name = 526217848; + UserPublic creator = 514277398; + // Explore page URL for this SQL query + string explore_url = 362030223; + // Should this query be rendered as plain text + bool plaintext = 493308541; + // Visualization configuration properties. These properties are typically opaque and differ based on the type of visualization used. There is no specified set of allowed keys. The values can be any type supported by JSON. A "type" key with a string value is often present, and is used by Looker to determine which visualization to present. Visualizations ignore unknown vis_config properties. + map vis_config = 422614288; + // ID of the ResultMakerLookup entry. + int64 result_maker_id = 347633462; +} + + +message SqlQueryCreate { + // Name of the db connection on which to run this query + string connection_name = 365832102; + // (DEPRECATED) Use `connection_name` instead + string connection_id = 513204708; + // Name of LookML Model (this or `connection_id` required) + string model_name = 526217848; + // SQL query + string sql = 114126; + // Visualization configuration properties. These properties are typically opaque and differ based on the type of visualization used. There is no specified set of allowed keys. The values can be any type supported by JSON. A "type" key with a string value is often present, and is used by Looker to determine which visualization to present. Visualizations ignore unknown vis_config properties. 
+ map vis_config = 422614288; +} + + +message SshPublicKey { + // The SSH public key created for this instance + string public_key = 476203234; +} + + +message SshServer { + // A unique id used to identify this SSH Server + string ssh_server_id = 326923896; + // The name to identify this SSH Server + string ssh_server_name = 432527764; + // The hostname or ip address of the SSH Server + string ssh_server_host = 432486491; + // The port to connect to on the SSH Server + int64 ssh_server_port = 432546065; + // The username used to connect to the SSH Server + string ssh_server_user = 432584164; + // The md5 fingerprint used to identify the SSH Server + string finger_print = 192567721; + // The SHA fingerprint used to identify the SSH Server + string sha_finger_print = 505918710; + // The SSH public key created for this instance + string public_key = 476203234; + // The current connection status to this SSH Server + string status = 446240775; +} + + +message SshTunnel { + // Unique ID for the tunnel + string tunnel_id = 313005097; + // SSH Server ID + string ssh_server_id = 326923896; + // SSH Server name + string ssh_server_name = 432527764; + // SSH Server Hostname or IP Address + string ssh_server_host = 432486491; + // SSH Server port + int64 ssh_server_port = 432546065; + // Username used to connect to the SSH Server + string ssh_server_user = 432584164; + // Time of last connect attempt + string last_attempt = 102099420; + // Localhost Port used by the Looker instance to connect to the remote DB + int64 local_host_port = 317463865; + // Hostname or IP Address of the Database Server + string database_host = 332770181; + // Port that the Database Server is listening on + int64 database_port = 332710606; + // Current connection status for this Tunnel + string status = 446240775; +} + +// A list of action types the integration supports. Valid values are: "cell", "query", "dashboard". +enum SupportedActionTypes { + _SUPPORTED_ACTION_TYPES_UNSET = 0; + SUPPORTED_ACTION_TYPES_CELL = 39602752; + SUPPORTED_ACTION_TYPES_QUERY = 303571453; + SUPPORTED_ACTION_TYPES_DASHBOARD = 431901642; +} + +// A list of all the download mechanisms the integration supports. The order of values is not significant: Looker will select the most appropriate supported download mechanism for a given query. The integration must ensure it can handle any of the mechanisms it claims to support. If unspecified, this defaults to all download setting values. Valid values are: "push", "url". +enum SupportedDownloadSettings { + _SUPPORTED_DOWNLOAD_SETTINGS_UNSET = 0; + SUPPORTED_DOWNLOAD_SETTINGS_PUSH = 431825476; + SUPPORTED_DOWNLOAD_SETTINGS_URL = 505621465; +} + +// A list of data formats the integration supports. If unspecified, the default is all data formats. Valid values are: "txt", "csv", "inline_json", "json", "json_label", "json_detail", "json_detail_lite_stream", "xlsx", "html", "wysiwyg_pdf", "assembled_pdf", "wysiwyg_png", "csv_zip". 
+enum SupportedFormats { + _SUPPORTED_FORMATS_UNSET = 0; + SUPPORTED_FORMATS_TXT = 273431079; + SUPPORTED_FORMATS_CSV = 273426956; + SUPPORTED_FORMATS_INLINE_JSON = 354342170; + SUPPORTED_FORMATS_JSON = 454587364; + SUPPORTED_FORMATS_JSON_LABEL = 320226243; + SUPPORTED_FORMATS_JSON_DETAIL = 319538827; + SUPPORTED_FORMATS_JSON_DETAIL_LITE_STREAM = 479092957; + SUPPORTED_FORMATS_XLSX = 454176883; + SUPPORTED_FORMATS_HTML = 454646049; + SUPPORTED_FORMATS_WYSIWYG_PDF = 506730778; + SUPPORTED_FORMATS_ASSEMBLED_PDF = 511286980; + SUPPORTED_FORMATS_WYSIWYG_PNG = 506730622; + SUPPORTED_FORMATS_CSV_ZIP = 451048029; +} + +// A list of formatting options the integration supports. If unspecified, defaults to all formats. Valid values are: "formatted", "unformatted". +enum SupportedFormattings { + _SUPPORTED_FORMATTINGS_UNSET = 0; + SUPPORTED_FORMATTINGS_FORMATTED = 528102542; + SUPPORTED_FORMATTINGS_UNFORMATTED = 272716527; +} + +// A list of visualization formatting options the integration supports. If unspecified, defaults to all formats. Valid values are: "apply", "noapply". +enum SupportedVisualizationFormattings { + _SUPPORTED_VISUALIZATION_FORMATTINGS_UNSET = 0; + SUPPORTED_VISUALIZATION_FORMATTINGS_APPLY = 402869043; + SUPPORTED_VISUALIZATION_FORMATTINGS_NOAPPLY = 464866640; +} + + +message Theme { + // Operations the current user is able to perform on this object + map can = 98256; + // Timestamp for when this theme becomes active. Null=always + google.protobuf.Timestamp begin_at = 372355810; + // Timestamp for when this theme expires. Null=never + google.protobuf.Timestamp end_at = 324690554; + // Unique Id + int64 id = 3355; + // Name of theme. Can only be alphanumeric and underscores. + string name = 3373707; + ThemeSettings settings = 358657800; +} + + +message ThemeSettings { + // Default background color + string background_color = 509195076; + // Base font size for scaling fonts + string base_font_size = 356717952; + // Optional. ID of color collection to use with the theme. Use an empty string for none. + string color_collection_id = 521066304; + // Default font color + string font_color = 507058317; + // Primary font family + string font_family = 383510869; + // Source specification for font + string font_source = 479849602; + // Info button color + string info_button_color = 479402238; + // Primary button color + string primary_button_color = 244794573; + // Toggle to show filters. Defaults to true. + bool show_filters_bar = 313221091; + // Toggle to show the title. Defaults to true. + bool show_title = 281598885; + // Text color for text tiles + string text_tile_text_color = 296014652; + // Background color for tiles + string tile_background_color = 472118176; + // Text color for tiles + string tile_text_color = 482028031; + // Color for titles + string title_color = 422398302; + // Warning button color + string warn_button_color = 343910760; + // The text alignment of tile titles (New Dashboards) + string tile_title_alignment = 278794058; + // Toggles the tile shadow (New Dashboards) + bool tile_shadow = 410871563; +} + + +message Timezone { + // Timezone + string value = 111972721; + // Description of timezone + string label = 102727412; + // Timezone group (e.g Common, Other, etc.) + string group = 98629247; +} + + +message UpdateCommand { + // Name of the command + string name = 3373707; + // Description of the command + string description = 431136513; +} + + +message UpdateFolder { + // Unique Name + string name = 3373707; + // Id of Parent. 
If the parent id is null, this is a root-level entry + string parent_id = 517581876; +} + + +message User { + // Operations the current user is able to perform on this object + map can = 98256; + // URL for the avatar image (may be generic) + string avatar_url = 402824823; + // URL for the avatar image (may be generic), does not specify size + string avatar_url_without_sizing = 444474714; + // API 3 credentials + repeated CredentialsApi3 credentials_api3 = 525961425; + CredentialsEmail credentials_email = 395554227; + // Embed credentials + repeated CredentialsEmbed credentials_embed = 395553813; + CredentialsGoogle credentials_google = 326240567; + CredentialsLDAP credentials_ldap = 525882429; + CredentialsLookerOpenid credentials_looker_openid = 339831418; + CredentialsOIDC credentials_oidc = 525858865; + CredentialsSaml credentials_saml = 525830924; + CredentialsTotp credentials_totp = 525820057; + // Full name for display (available only if both first_name and last_name are set) + string display_name = 403771642; + // EMail address + string email = 96619420; + // (Embed only) ID of user's group space based on the external_group_id optionally specified during embed user login + int64 embed_group_space_id = 358009270; + // First name + string first_name = 160985414; + // Array of ids of the groups for this user + repeated int64 group_ids = 370665154; + // ID string for user's home folder + string home_folder_id = 126256340; + // Unique Id + int64 id = 3355; + // Account has been disabled + bool is_disabled = 464089615; + // Last name + string last_name = 503280549; + // User's preferred locale. User locale takes precedence over Looker's system-wide default locale. Locale determines language of display strings and date and numeric formatting in API responses. Locale string must be a 2 letter language code or a combination of language code and region code: 'en' or 'en-US', for example. + string locale = 274365545; + // Array of strings representing the Looker versions that this user has used (this only goes back as far as '3.54.0') + repeated string looker_versions = 470584964; + // User's dev workspace has been checked for presence of applicable production projects + bool models_dir_validated = 476587036; + // ID of user's personal folder + int64 personal_folder_id = 468432428; + // User is identified as an employee of Looker + bool presumed_looker_employee = 507302581; + // Array of ids of the roles for this user + repeated int64 role_ids = 266265745; + // Active sessions + repeated Session sessions = 351269927; + // Per user dictionary of undocumented state information owned by the Looker UI. + map ui_state = 360744045; + // User is identified as an employee of Looker who has been verified via Looker corporate authentication + bool verified_looker_employee = 471763269; + // User's roles are managed by an external directory like SAML or LDAP and can not be changed directly. + bool roles_externally_managed = 413287133; + // User can be directly assigned a role. + bool allow_direct_roles = 315512839; + // User can be a direct member of a normal Looker group. + bool allow_normal_group_membership = 480346046; + // User can inherit roles from a normal Looker group. 
+ bool allow_roles_from_normal_groups = 302729383; + // Link to get this item + string url = 116079; +} + + +message UserAttribute { + // Operations the current user is able to perform on this object + map can = 98256; + // Unique Id + int64 id = 3355; + // Name of user attribute + string name = 3373707; + // Human-friendly label for user attribute + string label = 102727412; + // Type of user attribute ("string", "number", "datetime", "yesno", "zipcode") + string type = 3575610; + // Default value for when no value is set on the user + string default_value = 329667964; + // Attribute is a system default + bool is_system = 423289116; + // Attribute is permanent and cannot be deleted + bool is_permanent = 321308691; + // If true, users will not be able to view values of this attribute + bool value_is_hidden = 386768708; + // Non-admin users can see the values of their attributes and use them in filters + bool user_can_view = 75812952; + // Users can change the value of this attribute for themselves + bool user_can_edit = 76324083; + // Destinations to which a hidden attribute may be sent. Once set, cannot be edited. + string hidden_value_domain_whitelist = 389149836; +} + +// An array of user attribute types that are allowed to be used in filters on this field. Valid values are: "advanced_filter_string", "advanced_filter_number", "advanced_filter_datetime", "string", "number", "datetime", "relative_url", "yesno", "zipcode". +enum UserAttributeFilterTypes { + _USER_ATTRIBUTE_FILTER_TYPES_UNSET = 0; + USER_ATTRIBUTE_FILTER_TYPES_ADVANCED_FILTER_STRING = 38119857; + USER_ATTRIBUTE_FILTER_TYPES_ADVANCED_FILTER_NUMBER = 104258327; + USER_ATTRIBUTE_FILTER_TYPES_ADVANCED_FILTER_DATETIME = 505706937; + USER_ATTRIBUTE_FILTER_TYPES_STRING = 269933081; + USER_ATTRIBUTE_FILTER_TYPES_NUMBER = 468677071; + USER_ATTRIBUTE_FILTER_TYPES_DATETIME = 440156011; + USER_ATTRIBUTE_FILTER_TYPES_RELATIVE_URL = 485099084; + USER_ATTRIBUTE_FILTER_TYPES_YESNO = 395679271; + USER_ATTRIBUTE_FILTER_TYPES_ZIPCODE = 356288364; +} + + +message UserAttributeGroupValue { + // Operations the current user is able to perform on this object + map can = 98256; + // Unique Id of this group-attribute relation + int64 id = 3355; + // Id of group + int64 group_id = 506361563; + // Id of user attribute + int64 user_attribute_id = 534680475; + // If true, the "value" field will be null, because the attribute settings block access to this value + bool value_is_hidden = 386768708; + // Precedence for resolving value for user + int64 rank = 3492908; + // Value of user attribute for group + string value = 111972721; +} + + +message UserAttributeWithValue { + // Operations the current user is able to perform on this object + map can = 98256; + // Name of user attribute + string name = 3373707; + // Human-friendly label for user attribute + string label = 102727412; + // Precedence for setting value on user (lowest wins) + int64 rank = 3492908; + // Value of attribute for user + string value = 111972721; + // Id of User + int64 user_id = 147132913; + // Can the user set this value + bool user_can_edit = 76324083; + // If true, the "value" field will be null, because the attribute settings block access to this value + bool value_is_hidden = 386768708; + // Id of User Attribute + int64 user_attribute_id = 534680475; + // How user got this value for this attribute + string source = 448252914; + // If this user attribute is hidden, whitelist of destinations to which it may be sent. 
+ string hidden_value_domain_whitelist = 389149836; +} + + +message UserLoginLockout { + // Operations the current user is able to perform on this object + map can = 98256; + // Hash of user's client id + string key = 106079; + // Authentication method for login failures + string auth_type = 365227228; + // IP address of most recent failed attempt + string ip = 3367; + // User ID + int64 user_id = 147132913; + // Remote ID of user if using LDAP + string remote_id = 320131821; + // User's name + string full_name = 419294065; + // Email address associated with the user's account + string email = 96619420; + // Number of failures that triggered the lockout + int64 fail_count = 393297070; + // Time when lockout was triggered + google.protobuf.Timestamp lockout_at = 423752683; +} + + +message UserPublic { + // Operations the current user is able to perform on this object + map can = 98256; + // Unique Id + int64 id = 3355; + // First Name + string first_name = 160985414; + // Last Name + string last_name = 503280549; + // Full name for display (available only if both first_name and last_name are set) + string display_name = 403771642; + // URL for the avatar image (may be generic) + string avatar_url = 402824823; + // Link to get this item + string url = 116079; +} + + +message ValidationError { + // Error details + string message = 477462531; + // Error detail array + repeated ValidationErrorDetail errors = 323658789; + // Documentation link + string documentation_url = 282180250; +} + + +message ValidationErrorDetail { + // Field with error + string field = 97427706; + // Error code + string code = 3059181; + // Error info message + string message = 477462531; + // Documentation link + string documentation_url = 282180250; +} + +// The name of the starting day of the week. Valid values are: "monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday". +enum WeekStartDay { + _WEEK_START_DAY_UNSET = 0; + WEEK_START_DAY_MONDAY = 404658929; + WEEK_START_DAY_TUESDAY = 425798095; + WEEK_START_DAY_WEDNESDAY = 494504202; + WEEK_START_DAY_THURSDAY = 293816977; + WEEK_START_DAY_FRIDAY = 454104541; + WEEK_START_DAY_SATURDAY = 445994505; + WEEK_START_DAY_SUNDAY = 360329921; +} + + +message WelcomeEmailTest { + // The content that would be sent in the body of a custom welcome email + string content = 475765308; + // The subject that would be sent for the custom welcome email + string subject = 466971317; + // The header that would be sent in the body of a custom welcome email + string header = 305317724; +} + + +message WhitelabelConfiguration { + // Operations the current user is able to perform on this object + map can = 98256; + // Unique Id + int64 id = 3355; + // Customer logo image. Expected base64 encoded data (write-only) + string logo_file = 390205724; + // Logo image url (read-only) + string logo_url = 506968886; + // Custom favicon image. Expected base64 encoded data (write-only) + string favicon_file = 516846492; + // Favicon image url (read-only) + string favicon_url = 243764420; + // Default page title + string default_title = 329267638; + // Boolean to toggle showing help menus + bool show_help_menu = 295555474; + // Boolean to toggle showing docs + bool show_docs = 475949680; + // Boolean to toggle showing email subscription options. 
+ bool show_email_sub_options = 482126522; + // Boolean to toggle mentions of Looker in emails + bool allow_looker_mentions = 454767734; + // Boolean to toggle links to Looker in emails + bool allow_looker_links = 142583420; + // Allow subject line and email heading customization in customized emails” + bool custom_welcome_email_advanced = 25101424; + // Remove the word Looker from appearing in the account setup page + bool setup_mentions = 311522906; + // Remove Looker logo from Alerts + bool alerts_logo = 51977645; + // Remove Looker links from Alerts + bool alerts_links = 402869755; + // Remove Looker mentions in home folder page when you don’t have any items saved + bool folders_mentions = 269755200; +} + + +message Workspace { + // Operations the current user is able to perform on this object + map can = 98256; + // The unique id of this user workspace. Predefined workspace ids include "production" and "dev" + string id = 3355; + // The local state of each project in the workspace + repeated Project projects = 499348419; +} + +// Dynamically generated writeable type for ApiSession removes properties: + // can, sudo_user_id +message WriteApiSession { + // The id of active workspace for this session + string workspace_id = 394620993; +} + +// Dynamically generated writeable type for BackupConfiguration removes properties: + // can, url +message WriteBackupConfiguration { + // Type of backup: looker-s3 or custom-s3 + string type = 3575610; + // Name of bucket for custom-s3 backups + string custom_s3_bucket = 358719282; + // Name of region where the bucket is located + string custom_s3_bucket_region = 278083340; + // (Write-Only) AWS S3 key used for custom-s3 backups + string custom_s3_key = 277162771; + // (Write-Only) AWS S3 secret used for custom-s3 backups + string custom_s3_secret = 245512607; +} + +// Dynamically generated writeable type for Board removes properties: + // can, content_metadata_id, created_at, board_sections, id, updated_at, user_id, primary_homepage +message WriteBoard { + // Date of board deletion + google.protobuf.Timestamp deleted_at = 441264006; + // Description of the board + string description = 431136513; + // ids of the board sections in the order they should be displayed + repeated int64 section_order = 328304035; + // Title of the board + string title = 110371416; +} + +// Dynamically generated writeable type for BoardItem removes properties: + // can, content_created_by, content_favorite_id, content_metadata_id, content_updated_at, description, favorite_count, id, location, title, url, view_count +message WriteBoardItem { + // Dashboard to base this item on + int64 dashboard_id = 496187565; + // Associated Board Section + int64 board_section_id = 501228011; + // Look to base this item on + int64 look_id = 349778619; + // LookML Dashboard to base this item on + string lookml_dashboard_id = 339184839; + // An arbitrary integer representing the sort order within the section + int64 order = 106006350; +} + +// Dynamically generated writeable type for BoardSection removes properties: + // can, created_at, board_items, id, updated_at +message WriteBoardSection { + // Time at which this section was deleted. + google.protobuf.Timestamp deleted_at = 441264006; + // Description of the content found in this section. 
+ string description = 431136513; + // Id reference to parent board + int64 board_id = 402691141; + // ids of the board items in the order they should be displayed + repeated int64 item_order = 488938807; + // Name of row + string title = 110371416; +} + +// Dynamically generated writeable type for ColorCollection removes properties: + // id +message WriteColorCollection { + // Label of color collection + string label = 102727412; + // Array of categorical palette definitions + repeated DiscretePalette categoricalPalettes = 181997160; + // Array of discrete palette definitions + repeated ContinuousPalette sequentialPalettes = 296477661; + // Array of diverging palette definitions + repeated ContinuousPalette divergingPalettes = 417697719; +} + +// Dynamically generated writeable type for Command removes properties: + // id, author_id +message WriteCommand { + // Name of the command + string name = 3373707; + // Description of the command + string description = 431136513; + // Id of the content associated with the command + string linked_content_id = 63843193; + // Name of the command Valid values are: "dashboard", "lookml_dashboard". + LinkedContentType linked_content_type = 305853718; +} + +// Dynamically generated writeable type for ContentFavorite removes properties: + // id, look_id, dashboard_id, board_id +message WriteContentFavorite { + // User Id which owns this ContentFavorite + int64 user_id = 147132913; + // Content Metadata Id associated with this ContentFavorite + int64 content_metadata_id = 293222822; + WriteLookBasic look = 3327647; + WriteDashboardBase dashboard = 523930294; +} + +// Dynamically generated writeable type for ContentMeta removes properties: + // can, id, name, parent_id, dashboard_id, look_id, folder_id, content_type, inheriting_id, slug +message WriteContentMeta { + // Whether content inherits its access levels from parent + bool inherits = 226856664; +} + +// Dynamically generated writeable type for CreateDashboardFilter removes properties: + // id, field +message WriteCreateDashboardFilter { + // Id of Dashboard + string dashboard_id = 496187565; + // Name of filter + string name = 3373707; + // Title of filter + string title = 110371416; + // Type of filter: one of date, number, string, or field + string type = 3575610; + // Default value of filter + string default_value = 329667964; + // Model of filter (required if type = field) + string model = 104069929; + // Explore of filter (required if type = field) + string explore = 327287131; + // Dimension of filter (required if type = field) + string dimension = 273753254; + // Display order of this filter relative to other filters + int64 row = 113114; + // Array of listeners for faceted filters + repeated string listens_to_filters = 326256077; + // Whether the filter allows multiple filter values + bool allow_multiple_values = 444540061; + // Whether the filter requires a value to run the dashboard + bool required = 393139297; + // The visual configuration for this filter. Used to set up how the UI for this filter should appear. + map ui_config = 338401068; +} + +// Dynamically generated writeable type for CreateQueryTask removes properties: + // can +message WriteCreateQueryTask { + // Id of query to run + int64 query_id = 291663619; + // Desired async query result format. Valid values are: "inline_json", "json", "json_detail", "json_fe", "csv", "html", "md", "txt", "xlsx", "gsxml". 
+ ResultFormat result_format = 283324265; + // Source of query task + string source = 448252914; + // Create the task but defer execution + bool deferred = 323945455; + // Id of look associated with query. + int64 look_id = 349778619; + // Id of dashboard associated with query. + string dashboard_id = 496187565; +} + +// Dynamically generated writeable type for CredentialsEmail removes properties: + // can, created_at, is_disabled, logged_in_at, password_reset_url, type, url, user_url +message WriteCredentialsEmail { + // EMail address used for user login + string email = 96619420; + // Force the user to change their password upon their next login + bool forced_password_reset_at_next_login = 457930717; +} + +// Dynamically generated writeable type for CustomWelcomeEmail removes properties: + // can +message WriteCustomWelcomeEmail { + // If true, custom email content will replace the default body of welcome emails + bool enabled = 402398511; + // The HTML to use as custom content for welcome emails. Script elements and other potentially dangerous markup will be removed. + string content = 475765308; + // The text to appear in the email subject line. + string subject = 466971317; + // The text to appear in the header line of the email body. + string header = 305317724; +} + +// Dynamically generated writeable type for Dashboard removes properties: + // can, content_favorite_id, content_metadata_id, id, model, readonly, refresh_interval_to_i, user_id, created_at, dashboard_elements, dashboard_filters, dashboard_layouts, deleted_at, deleter_id, edit_uri, favorite_count, last_accessed_at, last_viewed_at, view_count +message WriteDashboard { + // Description + string description = 431136513; + // Is Hidden + bool hidden = 304371861; + // Timezone in which the Dashboard will run by default. + string query_timezone = 202424720; + // Refresh Interval, as a time duration phrase like "2 hours 30 minutes". A number with no time units will be interpreted as whole seconds. + string refresh_interval = 454206058; + WriteFolderBase folder = 317241572; + // Dashboard Title + string title = 110371416; + // Background color + string background_color = 509195076; + // Enables crossfiltering in dashboards - only available in dashboards-next (beta) + bool crossfilter_enabled = 315812131; + // Whether or not a dashboard is 'soft' deleted. + bool deleted = 387615750; + // configuration option that governs how dashboard loading will happen. + string load_configuration = 278159272; + // Links this dashboard to a particular LookML dashboard such that calling a **sync** operation on that LookML dashboard will update this dashboard to match. + string lookml_link_id = 458227247; + // Show filters bar. **Security Note:** This property only affects the *cosmetic* appearance of the dashboard, not a user's ability to access data. Hiding the filters bar does **NOT** prevent users from changing filters by other means. 
For information on how to set up secure data access control policies, see [Control User Access to Data](https://looker.com/docs/r/api/control-access) + bool show_filters_bar = 313221091; + // Show title + bool show_title = 281598885; + // Content Metadata Slug + string slug = 3533483; + // Id of folder + string folder_id = 527488652; + // Color of text on text tiles + string text_tile_text_color = 296014652; + // Tile background color + string tile_background_color = 472118176; + // Tile text color + string tile_text_color = 482028031; + // Title color + string title_color = 422398302; + DashboardAppearance appearance = 449179417; + // The preferred route for viewing this dashboard (ie: dashboards or dashboards-next) + string preferred_viewer = 518361060; +} + +// Dynamically generated writeable type for DashboardBase removes properties: + // can, content_favorite_id, content_metadata_id, description, hidden, id, model, query_timezone, readonly, refresh_interval, refresh_interval_to_i, title, user_id +message WriteDashboardBase { + WriteFolderBase folder = 317241572; +} + +// Dynamically generated writeable type for DashboardElement removes properties: + // can, body_text_as_html, edit_uri, id, lookml_link_id, note_text_as_html, refresh_interval_to_i, alert_count, title_text_as_html, subtitle_text_as_html +message WriteDashboardElement { + // Text tile body text + string body_text = 313253482; + // Id of Dashboard + string dashboard_id = 496187565; + WriteLookWithQuery look = 3327647; + // Id Of Look + string look_id = 349778619; + // ID of merge result + string merge_result_id = 374234034; + // Note Display + string note_display = 33923723; + // Note State + string note_state = 312488702; + // Note Text + string note_text = 445242710; + WriteQuery query = 107944136; + // Id Of Query + int64 query_id = 291663619; + // Refresh Interval + string refresh_interval = 454206058; + WriteResultMakerWithIdVisConfigAndDynamicFields result_maker = 523671970; + // ID of the ResultMakerLookup entry. + int64 result_maker_id = 347633462; + // Text tile subtitle text + string subtitle_text = 527614074; + // Title of dashboard element + string title = 110371416; + // Whether title is hidden + bool title_hidden = 278123192; + // Text tile title + string title_text = 443341651; + // Type + string type = 3575610; +} + +// Dynamically generated writeable type for DashboardFilter removes properties: + // can, id, dashboard_id, field +message WriteDashboardFilter { + // Name of filter + string name = 3373707; + // Title of filter + string title = 110371416; + // Type of filter: one of date, number, string, or field + string type = 3575610; + // Default value of filter + string default_value = 329667964; + // Model of filter (required if type = field) + string model = 104069929; + // Explore of filter (required if type = field) + string explore = 327287131; + // Dimension of filter (required if type = field) + string dimension = 273753254; + // Display order of this filter relative to other filters + int64 row = 113114; + // Array of listeners for faceted filters + repeated string listens_to_filters = 326256077; + // Whether the filter allows multiple filter values + bool allow_multiple_values = 444540061; + // Whether the filter requires a value to run the dashboard + bool required = 393139297; + // The visual configuration for this filter. Used to set up how the UI for this filter should appear. 
+ map ui_config = 338401068; +} + +// Dynamically generated writeable type for DashboardLayout removes properties: + // can, id, deleted, dashboard_title, dashboard_layout_components +message WriteDashboardLayout { + // Id of Dashboard + string dashboard_id = 496187565; + // Type + string type = 3575610; + // Is Active + bool active = 355737662; + // Column Width + int64 column_width = 104395875; + // Width + int64 width = 113126854; +} + +// Dynamically generated writeable type for DashboardLayoutComponent removes properties: + // can, id, deleted, element_title, element_title_hidden, vis_type +message WriteDashboardLayoutComponent { + // Id of Dashboard Layout + string dashboard_layout_id = 343841433; + // Id Of Dashboard Element + string dashboard_element_id = 79370295; + // Row + int64 row = 113114; + // Column + int64 column = 338709290; + // Width + int64 width = 113126854; + // Height + int64 height = 305257398; +} + +// Dynamically generated writeable type for Datagroup removes properties: + // can, created_at, id, model_name, name, trigger_check_at, trigger_error, trigger_value +message WriteDatagroup { + // UNIX timestamp before which cache entries are considered stale. Cannot be in the future. + int64 stale_before = 349731513; + // UNIX timestamp at which this entry became triggered. Cannot be in the future. + int64 triggered_at = 380872987; +} + +// Dynamically generated writeable type for DBConnection removes properties: + // can, dialect, snippets, pdts_enabled, uses_oauth, created_at, user_id, example, last_regen_at, last_reap_at, managed +message WriteDBConnection { + // Name of the connection. Also used as the unique identifier + string name = 3373707; + // Host name/address of server + string host = 3208616; + // Port number on server + int64 port = 3446913; + // Username for server authentication + string username = 265713450; + // (Write-Only) Password for server authentication + string password = 304246438; + // (Write-Only) Base64 encoded Certificate body for server authentication (when appropriate for dialect). + string certificate = 488099941; + // (Write-Only) Certificate keyfile type - .json or .p12 + string file_type = 329066488; + // Database name + string database = 447366238; + // Time zone of database + string db_timezone = 462146893; + // Timezone to use in queries + string query_timezone = 202424720; + // Scheme name + string schema = 453993775; + // Maximum number of concurrent connection to use + int64 max_connections = 426218758; + // Maximum size of query in GBs (BigQuery only, can be a user_attribute name) + string max_billing_gigabytes = 76432944; + // Use SSL/TLS when connecting to server + bool ssl = 114188; + // Verify the SSL + bool verify_ssl = 516339549; + // Name of temporary database (if used) + string tmp_db_name = 503272258; + // Additional params to add to JDBC connection string + string jdbc_additional_params = 377926166; + // Connection Pool Timeout, in seconds + int64 pool_timeout = 386536432; + // (Read/Write) SQL Dialect name + string dialect_name = 453584193; + // (Limited access feature) Are per user db credentials enabled. 
Enabling will remove previously set username and password + bool user_db_credentials = 401569003; + // Fields whose values map to user attribute names + repeated string user_attribute_fields = 501842924; + // Cron string specifying when maintenance such as PDT trigger checks and drops should be performed + string maintenance_cron = 408001806; + // Precache tables in the SQL Runner + bool sql_runner_precache_tables = 290662041; + // SQL statements (semicolon separated) to issue after connecting to the database. Requires `custom_after_connect_statements` license feature + string after_connect_statements = 39151260; + WriteDBConnectionOverride pdt_context_override = 355387537; + // The Id of the ssh tunnel this connection uses + string tunnel_id = 313005097; + // Maximum number of threads to use to build PDTs in parallel + int64 pdt_concurrency = 395006102; + // When disable_context_comment is true comment will not be added to SQL + bool disable_context_comment = 390654690; + // An External OAuth Application to use for authenticating to the database + int64 oauth_application_id = 466979396; +} + +// Dynamically generated writeable type for DBConnectionOverride removes properties: + // has_password +message WriteDBConnectionOverride { + // Context in which to override (`pdt` is the only allowed value) + string context = 475765463; + // Host name/address of server + string host = 3208616; + // Port number on server + string port = 3446913; + // Username for server authentication + string username = 265713450; + // (Write-Only) Password for server authentication + string password = 304246438; + // (Write-Only) Base64 encoded Certificate body for server authentication (when appropriate for dialect). + string certificate = 488099941; + // (Write-Only) Certificate keyfile type - .json or .p12 + string file_type = 329066488; + // Database name + string database = 447366238; + // Scheme name + string schema = 453993775; + // Additional params to add to JDBC connection string + string jdbc_additional_params = 377926166; + // SQL statements (semicolon separated) to issue after connecting to the database. Requires `custom_after_connect_statements` license feature + string after_connect_statements = 39151260; +} + +// Dynamically generated writeable type for ExternalOauthApplication removes properties: + // can, id, created_at +message WriteExternalOauthApplication { + // The name of this application. For Snowflake connections, this should be the name of the host database. + string name = 3373707; + // The OAuth Client ID for this application + string client_id = 476022396; + // (Write-Only) The OAuth Client Secret for this application + string client_secret = 278906578; + // The database dialect for this application. + string dialect_name = 453584193; +} + +// Dynamically generated writeable type for FolderBase removes properties: + // id, content_metadata_id, created_at, creator_id, child_count, external_id, is_embed, is_embed_shared_root, is_embed_users_root, is_personal, is_personal_descendant, is_shared_root, is_users_root, can +message WriteFolderBase { + // Unique Name + string name = 3373707; + // Id of Parent. If the parent id is null, this is a root-level entry + string parent_id = 517581876; +} + +// Dynamically generated writeable type for GitBranch removes properties: + // can, remote, remote_name, error, message, owner_name, readonly, personal, is_local, is_remote, is_production, ahead_count, behind_count, commit_at, remote_ref +message WriteGitBranch { + // The short name on the local. 
Updating `name` results in `git checkout ` + string name = 3373707; + // The resolved ref of this branch. Updating `ref` results in `git reset --hard ``. + string ref = 112787; +} + +// Dynamically generated writeable type for Group removes properties: + // can, contains_current_user, external_group_id, externally_managed, id, include_by_default, user_count +message WriteGroup { + // Group can be used in content access controls + bool can_add_to_content_metadata = 97036652; + // Name of group + string name = 3373707; +} + +// Dynamically generated writeable type for Integration removes properties: + // can, id, integration_hub_id, label, description, supported_formats, supported_action_types, supported_formattings, supported_visualization_formattings, supported_download_settings, icon_url, uses_oauth, required_fields, delegate_oauth +message WriteIntegration { + // Whether the integration is available to users. + bool enabled = 402398511; + // Array of params for the integration. + repeated IntegrationParam params = 497713981; + // Whether the integration is available to users. + repeated int64 installed_delegate_oauth_targets = 448297026; +} + +// Dynamically generated writeable type for IntegrationHub removes properties: + // can, id, label, official, fetch_error_message, has_authorization_token, legal_agreement_signed, legal_agreement_required, legal_agreement_text +message WriteIntegrationHub { + // URL of the hub. + string url = 116079; + // (Write-Only) An authorization key that will be sent to the integration hub on every request. + string authorization_token = 369213356; +} + +// Dynamically generated writeable type for InternalHelpResources removes properties: + // can +message WriteInternalHelpResources { + // If true and internal help resources content is not blank then the link for internal help resources will be shown in the help menu and the content displayed within Looker + bool enabled = 402398511; +} + +// Dynamically generated writeable type for InternalHelpResourcesContent removes properties: + // can +message WriteInternalHelpResourcesContent { + // Text to display in the help menu item which will display the internal help resources + string organization_name = 425834740; + // Content to be displayed in the internal help resources page/modal + string markdown_content = 165250775; +} + +// Dynamically generated writeable type for LDAPConfig removes properties: + // can, default_new_user_groups, default_new_user_roles, groups, has_auth_password, modified_at, modified_by, user_attributes, url +message WriteLDAPConfig { + // Allow alternate email-based login via '/login/email' for admins and for specified users with the 'login_special_email' permission. This option is useful as a fallback during ldap setup, if ldap config problems occur later, or if you need to support some users who are not in your ldap directory. Looker email/password logins are always disabled for regular users when ldap is enabled. 
+ bool alternate_email_login_allowed = 332315627; + // (Write-Only) Password for the LDAP account used to access the LDAP server + string auth_password = 415503534; + // Users will not be allowed to login at all unless a role for them is found in LDAP if set to true + bool auth_requires_role = 488630792; + // Distinguished name of LDAP account used to access the LDAP server + string auth_username = 474550684; + // LDAP server hostname + string connection_host = 365749556; + // LDAP host port + string connection_port = 365868705; + // Use Transport Layer Security + bool connection_tls = 317633542; + // Do not verify peer when using TLS + bool connection_tls_no_verify = 432426857; + // (Write-Only) Array of ids of groups that will be applied to new users the first time they login via LDAP + repeated int64 default_new_user_group_ids = 91703103; + // (Write-Only) Array of ids of roles that will be applied to new users the first time they login via LDAP + repeated int64 default_new_user_role_ids = 429566625; + // Enable/Disable LDAP authentication for the server + bool enabled = 402398511; + // Don't attempt to do LDAP search result paging (RFC 2696) even if the LDAP server claims to support it. + bool force_no_page = 344726209; + // Base dn for finding groups in LDAP searches + string groups_base_dn = 456083027; + // Identifier for a strategy for how Looker will search for groups in the LDAP server + string groups_finder_type = 336365796; + // LDAP Group attribute that signifies the members of the groups. Most commonly 'member' + string groups_member_attribute = 281577616; + // Optional comma-separated list of supported LDAP objectclass for groups when doing groups searches + string groups_objectclasses = 270841289; + // LDAP Group attribute that signifies the user in a group. Most commonly 'dn' + string groups_user_attribute = 467193795; + // (Read/Write) Array of mappings between LDAP Groups and arrays of Looker Role ids + repeated LDAPGroupWrite groups_with_role_ids = 475300071; + // Merge first-time ldap login to existing user account by email addresses. When a user logs in for the first time via ldap this option will connect this user into their existing account by finding the account with a matching email address. Otherwise a new user account will be created for the user. + bool merge_new_users_by_email = 391067523; + // Set user roles in Looker based on groups from LDAP + bool set_roles_from_groups = 441288982; + // (Write-Only) Test LDAP user password. For ldap tests only. + string test_ldap_password = 71095482; + // (Write-Only) Test LDAP user login id. For ldap tests only. + string test_ldap_user = 159290890; + // Name of user record attributes used to indicate email address field + string user_attribute_map_email = 384506455; + // Name of user record attributes used to indicate first name + string user_attribute_map_first_name = 401269830; + // Name of user record attributes used to indicate last name + string user_attribute_map_last_name = 506361433; + // Name of user record attributes used to indicate unique record id + string user_attribute_map_ldap_id = 271507598; + // (Read/Write) Array of mappings between LDAP User Attributes and arrays of Looker User Attribute ids + repeated LDAPUserAttributeWrite user_attributes_with_ids = 514666905; + // Distinguished name of LDAP node used as the base for user searches + string user_bind_base_dn = 468855466; + // (Optional) Custom RFC-2254 filter clause for use in finding user during login. 
Combined via 'and' with the other generated filter clauses. + string user_custom_filter = 359623314; + // Name(s) of user record attributes used for matching user login id (comma separated list) + string user_id_attribute_names = 495858029; + // (Optional) Name of user record objectclass used for finding user during login id + string user_objectclass = 365251017; + // Allow LDAP auth'd users to be members of non-reflected Looker groups. If 'false', user will be removed from non-reflected groups on login. + bool allow_normal_group_membership = 480346046; + // LDAP auth'd users will be able to inherit roles from non-reflected Looker groups. + bool allow_roles_from_normal_groups = 302729383; + // Allows roles to be directly assigned to LDAP auth'd users. + bool allow_direct_roles = 315512839; +} + +// Dynamically generated writeable type for LegacyFeature removes properties: + // can, id, name, description, enabled, disallowed_as_of_version, disable_on_upgrade_to_version, end_of_life_version, documentation_url, approximate_disable_date, approximate_end_of_life_date, has_disabled_on_upgrade +message WriteLegacyFeature { + // Whether this feature has been enabled by a user + bool enabled_locally = 472119345; +} + +// Dynamically generated writeable type for LookBasic removes properties: + // can, content_metadata_id, id, title +message WriteLookBasic { + // User Id + int64 user_id = 147132913; +} + +// Dynamically generated writeable type for LookmlModel removes properties: + // can, explores, has_content, label +message WriteLookmlModel { + // Array of names of connections this model is allowed to use + repeated string allowed_db_connection_names = 482839655; + // Name of the model. Also used as the unique identifier + string name = 3373707; + // Name of project containing the model + string project_name = 469694456; + // Is this model allowed to use all current and future connections + bool unlimited_db_connections = 419500594; +} + +// Dynamically generated writeable type for LookWithQuery removes properties: + // can, content_metadata_id, id, content_favorite_id, created_at, deleted_at, deleter_id, embed_url, excel_file_url, favorite_count, google_spreadsheet_formula, image_embed_url, last_accessed_at, last_updater_id, last_viewed_at, model, public_slug, public_url, short_url, updated_at, view_count, url +message WriteLookWithQuery { + // Look Title + string title = 110371416; + // User Id + int64 user_id = 147132913; + // Whether or not a look is 'soft' deleted. + bool deleted = 387615750; + // Description + string description = 431136513; + // auto-run query when Look viewed + bool is_run_on_load = 494569088; + // Is Public + bool public = 488711883; + // Query Id + int64 query_id = 291663619; + WriteFolderBase folder = 317241572; + // Folder Id + string folder_id = 527488652; + WriteQuery query = 107944136; +} + +// Dynamically generated writeable type for MergeQuery removes properties: + // can, id, result_maker_id +message WriteMergeQuery { + // Column Limit + string column_limit = 114546286; + // Dynamic Fields + string dynamic_fields = 74327737; + // Pivots + repeated string pivots = 493953479; + // Sorts + repeated string sorts = 109624981; + // Source Queries defining the results to be merged. 
+ repeated MergeQuerySourceQuery source_queries = 396289744; + // Total + bool total = 110549828; + // Visualization Config + map vis_config = 422614288; +} + +// Dynamically generated writeable type for ModelSet removes properties: + // can, all_access, built_in, id, url +message WriteModelSet { + repeated string models = 534399691; + // Name of ModelSet + string name = 3373707; +} + +// Dynamically generated writeable type for OauthClientApp removes properties: + // can, client_guid, tokens_invalid_before, activated_users +message WriteOauthClientApp { + // The uri with which this application will receive an auth code by browser redirect. + string redirect_uri = 475615044; + // The application's display name + string display_name = 403771642; + // A description of the application that will be displayed to users + string description = 431136513; + // When enabled is true, OAuth2 and API requests will be accepted from this app. When false, all requests from this app will be refused. + bool enabled = 402398511; + // If set, only Looker users who are members of this group can use this web app with Looker. If group_id is not set, any Looker user may use this app to access this Looker instance + int64 group_id = 506361563; +} + +// Dynamically generated writeable type for OIDCConfig removes properties: + // can, default_new_user_groups, default_new_user_roles, groups, modified_at, modified_by, test_slug, user_attributes, url +message WriteOIDCConfig { + // Allow alternate email-based login via '/login/email' for admins and for specified users with the 'login_special_email' permission. This option is useful as a fallback during ldap setup, if ldap config problems occur later, or if you need to support some users who are not in your ldap directory. Looker email/password logins are always disabled for regular users when ldap is enabled. + bool alternate_email_login_allowed = 332315627; + // OpenID Provider Audience + string audience = 487814402; + // Users will not be allowed to login at all unless a role for them is found in OIDC if set to true + bool auth_requires_role = 488630792; + // OpenID Provider Authorization Url + string authorization_endpoint = 535116198; + // (Write-Only) Array of ids of groups that will be applied to new users the first time they login via OIDC + repeated int64 default_new_user_group_ids = 91703103; + // (Write-Only) Array of ids of roles that will be applied to new users the first time they login via OIDC + repeated int64 default_new_user_role_ids = 429566625; + // Enable/Disable OIDC authentication for the server + bool enabled = 402398511; + // Name of user record attributes used to indicate groups. Used when 'groups_finder_type' is set to 'grouped_attribute_values' + string groups_attribute = 407121539; + // (Read/Write) Array of mappings between OIDC Groups and arrays of Looker Role ids + repeated OIDCGroupWrite groups_with_role_ids = 475300071; + // Relying Party Identifier (provided by OpenID Provider) + string identifier = 404608213; + // OpenID Provider Issuer + string issuer = 294789969; + // Merge first-time oidc login to existing user account by email addresses. When a user logs in for the first time via oidc this option will connect this user into their existing account by finding the account with a matching email address by testing the given types of credentials for existing users. Otherwise a new user account will be created for the user. 
This list (if provided) must be a comma separated list of string like 'email,ldap,google' + string new_user_migration_types = 348180790; + // Array of scopes to request. + repeated string scopes = 453884336; + // (Write-Only) Relying Party Secret (provided by OpenID Provider) + string secret = 453138600; + // Set user roles in Looker based on groups from OIDC + bool set_roles_from_groups = 441288982; + // OpenID Provider Token Url + string token_endpoint = 495238427; + // Name of user record attributes used to indicate email address field + string user_attribute_map_email = 384506455; + // Name of user record attributes used to indicate first name + string user_attribute_map_first_name = 401269830; + // Name of user record attributes used to indicate last name + string user_attribute_map_last_name = 506361433; + // (Read/Write) Array of mappings between OIDC User Attributes and arrays of Looker User Attribute ids + repeated OIDCUserAttributeWrite user_attributes_with_ids = 514666905; + // OpenID Provider User Information Url + string userinfo_endpoint = 146023451; + // Allow OIDC auth'd users to be members of non-reflected Looker groups. If 'false', user will be removed from non-reflected groups on login. + bool allow_normal_group_membership = 480346046; + // OIDC auth'd users will inherit roles from non-reflected Looker groups. + bool allow_roles_from_normal_groups = 302729383; + // Allows roles to be directly assigned to OIDC auth'd users. + bool allow_direct_roles = 315512839; +} + +// Dynamically generated writeable type for PasswordConfig removes properties: + // can +message WritePasswordConfig { + // Minimum number of characters required for a new password. Must be between 7 and 100 + int64 min_length = 355788614; + // Require at least one numeric character + bool require_numeric = 381969292; + // Require at least one uppercase and one lowercase letter + bool require_upperlower = 533074595; + // Require at least one special character + bool require_special = 379956471; +} + +// Dynamically generated writeable type for PermissionSet removes properties: + // can, all_access, built_in, id, url +message WritePermissionSet { + // Name of PermissionSet + string name = 3373707; + repeated string permissions = 283426081; +} + +// Dynamically generated writeable type for Project removes properties: + // can, id, uses_git, is_example +message WriteProject { + // Project display name + string name = 3373707; + // Git remote repository url + string git_remote_url = 337946079; + // Git username for HTTPS authentication. (For production only, if using user attributes.) + string git_username = 393319598; + // (Write-Only) Git password for HTTPS authentication. (For production only, if using user attributes.) + string git_password = 348030004; + // User attribute name for username in per-user HTTPS authentication. + string git_username_user_attribute = 430560495; + // User attribute name for password in per-user HTTPS authentication. + string git_password_user_attribute = 173008929; + // Name of the git service provider + string git_service_name = 481280695; + // Port that HTTP(S) application server is running on (for PRs, file browsing, etc.) + int64 git_application_server_http_port = 478877938; + // Scheme that is running on application server (for PRs, file browsing, etc.) Valid values are: "http", "https". + GitApplicationServerHttpScheme git_application_server_http_scheme = 452168167; + // (Write-Only) Optional secret token with which to authenticate requests to the webhook deploy endpoint. 
If not set, endpoint is unauthenticated. + string deploy_secret = 451554436; + // (Write-Only) When true, unsets the deploy secret to allow unauthenticated access to the webhook deploy endpoint. + bool unset_deploy_secret = 465427100; + // The git pull request policy for this project. Valid values are: "off", "links", "recommended", "required". + PullRequestMode pull_request_mode = 421135683; + // Validation policy: If true, the project must pass validation checks before project changes can be committed to the git repository + bool validation_required = 427509330; + // If true, advanced git release management is enabled for this project + bool git_release_mgmt_enabled = 529764014; + // Validation policy: If true, the project can be committed with warnings when `validation_required` is true. (`allow_warnings` does nothing if `validation_required` is false). + bool allow_warnings = 320266972; + // Status of dependencies in your manifest & lockfile + string dependency_status = 50086042; +} + +// Dynamically generated writeable type for Query removes properties: + // can, id, slug, share_url, expanded_share_url, url, has_table_calculations +message WriteQuery { + // Model + string model = 104069929; + // Explore Name + string view = 3619493; + // Fields + repeated string fields = 318677073; + // Pivots + repeated string pivots = 493953479; + // Fill Fields + repeated string fill_fields = 491318906; + // Filters + map filters = 427273730; + // Filter Expression + string filter_expression = 294547567; + // Sorting for the query results. Use the format `["view.field", ...]` to sort on fields in ascending order. Use the format `["view.field desc", ...]` to sort on fields in descending order. Use `["__UNSORTED__"]` (2 underscores before and after) to disable sorting entirely. Empty sorts `[]` will trigger a default sort. + repeated string sorts = 109624981; + // Limit + string limit = 102976443; + // Column Limit + string column_limit = 114546286; + // Total + bool total = 110549828; + // Raw Total + string row_total = 339028775; + // Fields on which to run subtotals + repeated string subtotals = 277302775; + // Visualization configuration properties. These properties are typically opaque and differ based on the type of visualization used. There is no specified set of allowed keys. The values can be any type supported by JSON. A "type" key with a string value is often present, and is used by Looker to determine which visualization to present. Visualizations ignore unknown vis_config properties. + map vis_config = 422614288; + // The filter_config represents the state of the filter UI on the explore page for a given query. When running a query via the Looker UI, this parameter takes precedence over "filters". When creating a query or modifying an existing query, "filter_config" should be set to null. Setting it to any other value could cause unexpected filtering behavior. The format should be considered opaque. + map filter_config = 505106699; + // Visible UI Sections + string visible_ui_sections = 328103627; + // Dynamic Fields + string dynamic_fields = 74327737; + // Client Id: used to generate shortened explore URLs. If set by client, must be a unique 22 character alphanumeric string. Otherwise one will be generated. 
+ string client_id = 476022396; + // Query Timezone + string query_timezone = 202424720; +} + +// Dynamically generated writeable type for RepositoryCredential removes properties: + // can, id, root_project_id, remote_url, is_configured +message WriteRepositoryCredential { + // Git username for HTTPS authentication. + string git_username = 393319598; + // (Write-Only) Git password for HTTPS authentication. + string git_password = 348030004; + // Public deploy key for SSH authentication. + string ssh_public_key = 461638024; +} + +// Dynamically generated writeable type for ResultMakerWithIdVisConfigAndDynamicFields removes properties: + // id, dynamic_fields, filterables, sorts, merge_result_id, total, query_id, sql_query_id, vis_config +message WriteResultMakerWithIdVisConfigAndDynamicFields { + WriteQuery query = 107944136; +} + +// Dynamically generated writeable type for Role removes properties: + // can, id, url, users_url +message WriteRole { + // Name of Role + string name = 3373707; + WritePermissionSet permission_set = 359445379; + // (Write-Only) Id of permission set + int64 permission_set_id = 379867460; + WriteModelSet model_set = 502576493; + // (Write-Only) Id of model set + int64 model_set_id = 1141778; +} + +// Dynamically generated writeable type for SamlConfig removes properties: + // can, test_slug, modified_at, modified_by, default_new_user_roles, default_new_user_groups, groups, user_attributes, url +message WriteSamlConfig { + // Enable/Disable Saml authentication for the server + bool enabled = 402398511; + // Identity Provider Certificate (provided by IdP) + string idp_cert = 164795758; + // Identity Provider Url (provided by IdP) + string idp_url = 416975417; + // Identity Provider Issuer (provided by IdP) + string idp_issuer = 360328317; + // Identity Provider Audience (set in IdP config). Optional in Looker. Set this only if you want Looker to validate the audience value returned by the IdP. + string idp_audience = 473436179; + // Count of seconds of clock drift to allow when validating timestamps of assertions. + int64 allowed_clock_drift = 494276104; + // Name of user record attributes used to indicate email address field + string user_attribute_map_email = 384506455; + // Name of user record attributes used to indicate first name + string user_attribute_map_first_name = 401269830; + // Name of user record attributes used to indicate last name + string user_attribute_map_last_name = 506361433; + // Merge first-time saml login to existing user account by email addresses. When a user logs in for the first time via saml this option will connect this user into their existing account by finding the account with a matching email address by testing the given types of credentials for existing users. Otherwise a new user account will be created for the user. This list (if provided) must be a comma separated list of string like 'email,ldap,google' + string new_user_migration_types = 348180790; + // Allow alternate email-based login via '/login/email' for admins and for specified users with the 'login_special_email' permission. This option is useful as a fallback during ldap setup, if ldap config problems occur later, or if you need to support some users who are not in your ldap directory. Looker email/password logins are always disabled for regular users when ldap is enabled. 
+ bool alternate_email_login_allowed = 332315627; + // (Write-Only) Array of ids of roles that will be applied to new users the first time they login via Saml + repeated int64 default_new_user_role_ids = 429566625; + // (Write-Only) Array of ids of groups that will be applied to new users the first time they login via Saml + repeated int64 default_new_user_group_ids = 91703103; + // Set user roles in Looker based on groups from Saml + bool set_roles_from_groups = 441288982; + // Name of user record attributes used to indicate groups. Used when 'groups_finder_type' is set to 'grouped_attribute_values' + string groups_attribute = 407121539; + // (Read/Write) Array of mappings between Saml Groups and arrays of Looker Role ids + repeated SamlGroupWrite groups_with_role_ids = 475300071; + // Users will not be allowed to login at all unless a role for them is found in Saml if set to true + bool auth_requires_role = 488630792; + // (Read/Write) Array of mappings between Saml User Attributes and arrays of Looker User Attribute ids + repeated SamlUserAttributeWrite user_attributes_with_ids = 514666905; + // Identifier for a strategy for how Looker will find groups in the SAML response. One of ['grouped_attribute_values', 'individual_attributes'] + string groups_finder_type = 336365796; + // Value for group attribute used to indicate membership. Used when 'groups_finder_type' is set to 'individual_attributes' + string groups_member_value = 400280826; + // Bypass the login page when user authentication is required. Redirect to IdP immediately instead. + bool bypass_login_page = 349317458; + // Allow SAML auth'd users to be members of non-reflected Looker groups. If 'false', user will be removed from non-reflected groups on login. + bool allow_normal_group_membership = 480346046; + // SAML auth'd users will inherit roles from non-reflected Looker groups. + bool allow_roles_from_normal_groups = 302729383; + // Allows roles to be directly assigned to SAML auth'd users. + bool allow_direct_roles = 315512839; +} + +// Dynamically generated writeable type for ScheduledPlan removes properties: + // id, created_at, updated_at, title, user, next_run_at, last_run_at, can +message WriteScheduledPlan { + // Name of this scheduled plan + string name = 3373707; + // User Id which owns this scheduled plan + int64 user_id = 147132913; + // Whether schedule is run as recipient (only applicable for email recipients) + bool run_as_recipient = 296373536; + // Whether the ScheduledPlan is enabled + bool enabled = 402398511; + // Id of a look + int64 look_id = 349778619; + // Id of a dashboard + int64 dashboard_id = 496187565; + // Id of a LookML dashboard + string lookml_dashboard_id = 339184839; + // Query string to run look or dashboard with + string filters_string = 109992149; + // (DEPRECATED) Alias for filters_string field + string dashboard_filters = 454602920; + // Delivery should occur if running the dashboard or look returns results + bool require_results = 330870556; + // Delivery should occur if the dashboard look does not return results + bool require_no_results = 420497737; + // Delivery should occur if data have changed since the last run + bool require_change = 381228813; + // Will run an unlimited query and send all results. 
+ bool send_all_results = 335604919; + // Vixie-Style crontab specification when to run + string crontab = 519088419; + // Name of a datagroup; if specified will run when datagroup triggered (can't be used with cron string) + string datagroup = 356040427; + // Timezone for interpreting the specified crontab (default is Looker instance timezone) + string timezone = 519056897; + // Query id + string query_id = 291663619; + // Scheduled plan destinations + repeated ScheduledPlanDestination scheduled_plan_destination = 354055597; + // Whether the plan in question should only be run once (usually for testing) + bool run_once = 407993818; + // Whether links back to Looker should be included in this ScheduledPlan + bool include_links = 386647463; + // The size of paper the PDF should be formatted to fit. Valid values are: "letter", "legal", "tabloid", "a0", "a1", "a2", "a3", "a4", "a5". + string pdf_paper_size = 491759120; + // Whether the PDF should be formatted for landscape orientation + bool pdf_landscape = 359176724; + // Whether this schedule is in an embed context or not + bool embed = 96620249; + // Color scheme of the dashboard if applicable + string color_theme = 331361403; + // Whether or not to expand table vis to full length + bool long_tables = 359736222; + // The pixel width at which we render the inline table visualizations + int64 inline_table_width = 484902747; +} + +// Dynamically generated writeable type for SessionConfig removes properties: + // can +message WriteSessionConfig { + // Allow users to have persistent sessions when they login + bool allow_persistent_sessions = 319921999; + // Number of minutes for user sessions. Must be between 5 and 43200 + int64 session_minutes = 457960434; + // Allow users to have an unbounded number of concurrent sessions (otherwise, users will be limited to only one session at a time). + bool unlimited_sessions_per_user = 344212172; + // Enforce session logout for sessions that are inactive for 15 minutes. + bool use_inactivity_based_logout = 353744386; + // Track location of session when user logs in. + bool track_session_location = 531851756; +} + +// Dynamically generated writeable type for SshServer removes properties: + // ssh_server_id, finger_print, sha_finger_print, public_key, status +message WriteSshServer { + // The name to identify this SSH Server + string ssh_server_name = 432527764; + // The hostname or ip address of the SSH Server + string ssh_server_host = 432486491; + // The port to connect to on the SSH Server + int64 ssh_server_port = 432546065; + // The username used to connect to the SSH Server + string ssh_server_user = 432584164; +} + +// Dynamically generated writeable type for SshTunnel removes properties: + // tunnel_id, ssh_server_name, ssh_server_host, ssh_server_port, ssh_server_user, last_attempt, local_host_port, status +message WriteSshTunnel { + // SSH Server ID + string ssh_server_id = 326923896; + // Hostname or IP Address of the Database Server + string database_host = 332770181; + // Port that the Database Server is listening on + int64 database_port = 332710606; +} + +// Dynamically generated writeable type for Theme removes properties: + // can, id +message WriteTheme { + // Timestamp for when this theme becomes active. Null=always + google.protobuf.Timestamp begin_at = 372355810; + // Timestamp for when this theme expires. Null=never + google.protobuf.Timestamp end_at = 324690554; + // Name of theme. Can only be alphanumeric and underscores. 
+ string name = 3373707; + ThemeSettings settings = 358657800; +} + +// Dynamically generated writeable type for User removes properties: + // can, avatar_url, avatar_url_without_sizing, credentials_api3, credentials_embed, credentials_google, credentials_ldap, credentials_looker_openid, credentials_oidc, credentials_saml, credentials_totp, display_name, email, embed_group_space_id, group_ids, id, looker_versions, personal_folder_id, presumed_looker_employee, role_ids, sessions, verified_looker_employee, roles_externally_managed, allow_direct_roles, allow_normal_group_membership, allow_roles_from_normal_groups, url +message WriteUser { + WriteCredentialsEmail credentials_email = 395554227; + // First name + string first_name = 160985414; + // ID string for user's home folder + string home_folder_id = 126256340; + // Account has been disabled + bool is_disabled = 464089615; + // Last name + string last_name = 503280549; + // User's preferred locale. User locale takes precedence over Looker's system-wide default locale. Locale determines language of display strings and date and numeric formatting in API responses. Locale string must be a 2 letter language code or a combination of language code and region code: 'en' or 'en-US', for example. + string locale = 274365545; + // User's dev workspace has been checked for presence of applicable production projects + bool models_dir_validated = 476587036; + // Per user dictionary of undocumented state information owned by the Looker UI. + map ui_state = 360744045; +} + +// Dynamically generated writeable type for UserAttribute removes properties: + // can, id, is_system, is_permanent +message WriteUserAttribute { + // Name of user attribute + string name = 3373707; + // Human-friendly label for user attribute + string label = 102727412; + // Type of user attribute ("string", "number", "datetime", "yesno", "zipcode") + string type = 3575610; + // Default value for when no value is set on the user + string default_value = 329667964; + // If true, users will not be able to view values of this attribute + bool value_is_hidden = 386768708; + // Non-admin users can see the values of their attributes and use them in filters + bool user_can_view = 75812952; + // Users can change the value of this attribute for themselves + bool user_can_edit = 76324083; + // Destinations to which a hidden attribute may be sent. Once set, cannot be edited. + string hidden_value_domain_whitelist = 389149836; +} + +// Dynamically generated writeable type for UserAttributeWithValue removes properties: + // can, name, label, rank, user_id, user_can_edit, value_is_hidden, user_attribute_id, source, hidden_value_domain_whitelist +message WriteUserAttributeWithValue { + // Value of attribute for user + string value = 111972721; +} + +// Dynamically generated writeable type for WhitelabelConfiguration removes properties: + // can, id, logo_url, favicon_url +message WriteWhitelabelConfiguration { + // Customer logo image. Expected base64 encoded data (write-only) + string logo_file = 390205724; + // Custom favicon image. Expected base64 encoded data (write-only) + string favicon_file = 516846492; + // Default page title + string default_title = 329267638; + // Boolean to toggle showing help menus + bool show_help_menu = 295555474; + // Boolean to toggle showing docs + bool show_docs = 475949680; + // Boolean to toggle showing email subscription options. 
+ bool show_email_sub_options = 482126522; + // Boolean to toggle mentions of Looker in emails + bool allow_looker_mentions = 454767734; + // Boolean to toggle links to Looker in emails + bool allow_looker_links = 142583420; + // Allow subject line and email heading customization in customized emails” + bool custom_welcome_email_advanced = 25101424; + // Remove the word Looker from appearing in the account setup page + bool setup_mentions = 311522906; + // Remove Looker logo from Alerts + bool alerts_logo = 51977645; + // Remove Looker links from Alerts + bool alerts_links = 402869755; + // Remove Looker mentions in home folder page when you don’t have any items saved + bool folders_mentions = 269755200; +} + +message LoginRequest { + // client_id part of API3 Key. + string client_id = 476022396; + // client_secret part of API3 Key. + string client_secret = 278906578; +} + +message LoginResponse { + // Access token with metadata. + AccessToken result = 1; +} + +message LoginUserRequest { + // Id of user. + int64 user_id = 147132913; + // When true (default), API calls using the returned access_token are attributed to the admin user who created the access_token. When false, API activity is attributed to the user the access_token runs as. False requires a looker license. + bool associative = 87499439; +} + +message LoginUserResponse { + // Access token with metadata. + AccessToken result = 1; +} + +message LogoutRequest { + +} + +message LogoutResponse { + // Logged out successfully. + string result = 1; +} + +message CreateSsoEmbedUrlRequest { + EmbedSsoParams body = 3029410; +} + +message CreateSsoEmbedUrlResponse { + // Signed SSO URL + EmbedUrlResponse result = 1; +} + +message CreateEmbedUrlAsMeRequest { + EmbedParams body = 3029410; +} + +message CreateEmbedUrlAsMeResponse { + // Embed URL + EmbedUrlResponse result = 1; +} + +message LdapConfigRequest { + +} + +message LdapConfigResponse { + // LDAP Configuration. + LDAPConfig result = 1; +} + +message UpdateLdapConfigRequest { + LDAPConfig body = 3029410; +} + +message UpdateLdapConfigResponse { + // New state for LDAP Configuration. + LDAPConfig result = 1; +} + +message TestLdapConfigConnectionRequest { + LDAPConfig body = 3029410; +} + +message TestLdapConfigConnectionResponse { + // Result info. + LDAPConfigTestResult result = 1; +} + +message TestLdapConfigAuthRequest { + LDAPConfig body = 3029410; +} + +message TestLdapConfigAuthResponse { + // Result info. + LDAPConfigTestResult result = 1; +} + +message TestLdapConfigUserInfoRequest { + LDAPConfig body = 3029410; +} + +message TestLdapConfigUserInfoResponse { + // Result info. + LDAPConfigTestResult result = 1; +} + +message TestLdapConfigUserAuthRequest { + LDAPConfig body = 3029410; +} + +message TestLdapConfigUserAuthResponse { + // Result info. + LDAPConfigTestResult result = 1; +} + +message AllOauthClientAppsRequest { + // Requested fields. + string fields = 318677073; +} + +message AllOauthClientAppsResponse { + // OAuth Client App + repeated OauthClientApp result = 1; +} + +message AllOauthClientAppsStreamResponse { + // OAuth Client App + OauthClientApp result = 1; +} + +message OauthClientAppRequest { + // The unique id of this application + string client_guid = 174062979; + // Requested fields. 
+ string fields = 318677073; +} + +message OauthClientAppResponse { + // OAuth Client App + OauthClientApp result = 1; +} + +message RegisterOauthClientAppRequest { + // The unique id of this application + string client_guid = 174062979; + OauthClientApp body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message RegisterOauthClientAppResponse { + // OAuth Client App + OauthClientApp result = 1; +} + +message UpdateOauthClientAppRequest { + // The unique id of this application + string client_guid = 174062979; + OauthClientApp body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message UpdateOauthClientAppResponse { + // OAuth Client App + OauthClientApp result = 1; +} + +message DeleteOauthClientAppRequest { + // The unique id of this application + string client_guid = 174062979; +} + +message DeleteOauthClientAppResponse { + // Successfully deleted. + string result = 1; +} + +message InvalidateTokensRequest { + // The unique id of the application + string client_guid = 174062979; +} + +message InvalidateTokensResponse { + // Successfully deleted. + string result = 1; +} + +message ActivateAppUserRequest { + // The unique id of this application + string client_guid = 174062979; + // The id of the user to enable use of this app + int64 user_id = 147132913; + // Requested fields. + string fields = 318677073; +} + +message ActivateAppUserResponse { + +} + +message DeactivateAppUserRequest { + // The unique id of this application + string client_guid = 174062979; + // The id of the user to enable use of this app + int64 user_id = 147132913; + // Requested fields. + string fields = 318677073; +} + +message DeactivateAppUserResponse { + // Successfully deleted. + string result = 1; +} + +message OidcConfigRequest { + +} + +message OidcConfigResponse { + // OIDC Configuration. + OIDCConfig result = 1; +} + +message UpdateOidcConfigRequest { + OIDCConfig body = 3029410; +} + +message UpdateOidcConfigResponse { + // New state for OIDC Configuration. + OIDCConfig result = 1; +} + +message OidcTestConfigRequest { + // Slug of test config + string test_slug = 295036274; +} + +message OidcTestConfigResponse { + // OIDC test config. + OIDCConfig result = 1; +} + +message DeleteOidcTestConfigRequest { + // Slug of test config + string test_slug = 295036274; +} + +message DeleteOidcTestConfigResponse { + // Test config succssfully deleted. + string result = 1; +} + +message CreateOidcTestConfigRequest { + OIDCConfig body = 3029410; +} + +message CreateOidcTestConfigResponse { + // OIDC test config + OIDCConfig result = 1; +} + +message PasswordConfigRequest { + +} + +message PasswordConfigResponse { + // Password Config + PasswordConfig result = 1; +} + +message UpdatePasswordConfigRequest { + PasswordConfig body = 3029410; +} + +message UpdatePasswordConfigResponse { + // Password Config + PasswordConfig result = 1; +} + +message ForcePasswordResetAtNextLoginForAllUsersRequest { + +} + +message ForcePasswordResetAtNextLoginForAllUsersResponse { + // Password Config + string result = 1; +} + +message SamlConfigRequest { + +} + +message SamlConfigResponse { + // SAML Configuration. + SamlConfig result = 1; +} + +message UpdateSamlConfigRequest { + SamlConfig body = 3029410; +} + +message UpdateSamlConfigResponse { + // New state for SAML Configuration. + SamlConfig result = 1; +} + +message SamlTestConfigRequest { + // Slug of test config + string test_slug = 295036274; +} + +message SamlTestConfigResponse { + // SAML test config. 
+ SamlConfig result = 1; +} + +message DeleteSamlTestConfigRequest { + // Slug of test config + string test_slug = 295036274; +} + +message DeleteSamlTestConfigResponse { + // Test config succssfully deleted. + string result = 1; +} + +message CreateSamlTestConfigRequest { + SamlConfig body = 3029410; +} + +message CreateSamlTestConfigResponse { + // SAML test config + SamlConfig result = 1; +} + +message ParseSamlIdpMetadataRequest { + string body = 3029410; +} + +message ParseSamlIdpMetadataResponse { + // Parse result + SamlMetadataParseResult result = 1; +} + +message FetchAndParseSamlIdpMetadataRequest { + string body = 3029410; +} + +message FetchAndParseSamlIdpMetadataResponse { + // Parse result + SamlMetadataParseResult result = 1; +} + +message SessionConfigRequest { + +} + +message SessionConfigResponse { + // Session Config + SessionConfig result = 1; +} + +message UpdateSessionConfigRequest { + SessionConfig body = 3029410; +} + +message UpdateSessionConfigResponse { + // Session Config + SessionConfig result = 1; +} + +message AllUserLoginLockoutsRequest { + // Include only these fields in the response + string fields = 318677073; +} + +message AllUserLoginLockoutsResponse { + // User Login Lockout + repeated UserLoginLockout result = 1; +} + +message AllUserLoginLockoutsStreamResponse { + // User Login Lockout + UserLoginLockout result = 1; +} + +message SearchUserLoginLockoutsRequest { + // Include only these fields in the response + string fields = 318677073; + // Return only page N of paginated results + int64 page = 3433103; + // Return N rows of data per page + int64 per_page = 424711281; + // Fields to sort by. + string sorts = 109624981; + // Auth type user is locked out for (email, ldap, totp, api) + string auth_type = 365227228; + // Match name + string full_name = 419294065; + // Match email + string email = 96619420; + // Match remote LDAP ID + string remote_id = 320131821; + // Combine given search criteria in a boolean OR expression + bool filter_or = 440686075; +} + +message SearchUserLoginLockoutsResponse { + // User Login Lockout + repeated UserLoginLockout result = 1; +} + +message SearchUserLoginLockoutsStreamResponse { + // User Login Lockout + UserLoginLockout result = 1; +} + +message DeleteUserLoginLockoutRequest { + // The key associated with the locked user + string key = 106079; +} + +message DeleteUserLoginLockoutResponse { + // Successfully deleted. + string result = 1; +} + +message AllBoardsRequest { + // Requested fields. + string fields = 318677073; +} + +message AllBoardsResponse { + // Board + repeated Board result = 1; +} + +message AllBoardsStreamResponse { + // Board + Board result = 1; +} + +message CreateBoardRequest { + Board body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message CreateBoardResponse { + // Board + Board result = 1; +} + +message SearchBoardsRequest { + // Matches board title. + string title = 110371416; + // Matches the timestamp for when the board was created. + string created_at = 342420026; + // The first name of the user who created this board. + string first_name = 160985414; + // The last name of the user who created this board. + string last_name = 503280549; + // Requested fields. + string fields = 318677073; + // Return favorited boards when true. + bool favorited = 446309742; + // Filter on boards created by a particular user. + string creator_id = 344833155; + // The fields to sort the results by + string sorts = 109624981; + // The page to return. 
+ int64 page = 3433103; + // The number of items in the returned page. + int64 per_page = 424711281; + // The number of items to skip before returning any. (used with limit and takes priority over page and per_page) + int64 offset = 509889974; + // The maximum number of items to return. (used with offset and takes priority over page and per_page) + int64 limit = 102976443; + // Combine given search criteria in a boolean OR expression + bool filter_or = 440686075; +} + +message SearchBoardsResponse { + // boards + repeated Board result = 1; +} + +message SearchBoardsStreamResponse { + // boards + Board result = 1; +} + +message BoardRequest { + // Id of board + int64 board_id = 402691141; + // Requested fields. + string fields = 318677073; +} + +message BoardResponse { + // Board + Board result = 1; +} + +message UpdateBoardRequest { + // Id of board + int64 board_id = 402691141; + Board body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message UpdateBoardResponse { + // Board + Board result = 1; +} + +message DeleteBoardRequest { + // Id of board + int64 board_id = 402691141; +} + +message DeleteBoardResponse { + // Successfully deleted. + string result = 1; +} + +message AllBoardItemsRequest { + // Requested fields. + string fields = 318677073; + // Fields to sort by. + string sorts = 109624981; + // Filter to a specific board section + string board_section_id = 501228011; +} + +message AllBoardItemsResponse { + // Board Item + repeated BoardItem result = 1; +} + +message AllBoardItemsStreamResponse { + // Board Item + BoardItem result = 1; +} + +message CreateBoardItemRequest { + BoardItem body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message CreateBoardItemResponse { + // Board Item + BoardItem result = 1; +} + +message BoardItemRequest { + // Id of board item + int64 board_item_id = 428047177; + // Requested fields. + string fields = 318677073; +} + +message BoardItemResponse { + // Board Item + BoardItem result = 1; +} + +message UpdateBoardItemRequest { + // Id of board item + int64 board_item_id = 428047177; + BoardItem body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message UpdateBoardItemResponse { + // Board Item + BoardItem result = 1; +} + +message DeleteBoardItemRequest { + // Id of board_item + int64 board_item_id = 428047177; +} + +message DeleteBoardItemResponse { + // Successfully deleted. + string result = 1; +} + +message AllBoardSectionsRequest { + // Requested fields. + string fields = 318677073; + // Fields to sort by. + string sorts = 109624981; +} + +message AllBoardSectionsResponse { + // Board section + repeated BoardSection result = 1; +} + +message AllBoardSectionsStreamResponse { + // Board section + BoardSection result = 1; +} + +message CreateBoardSectionRequest { + BoardSection body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message CreateBoardSectionResponse { + // Board section + BoardSection result = 1; +} + +message BoardSectionRequest { + // Id of board section + int64 board_section_id = 501228011; + // Requested fields. + string fields = 318677073; +} + +message BoardSectionResponse { + // Board section + BoardSection result = 1; +} + +message UpdateBoardSectionRequest { + // Id of board section + int64 board_section_id = 501228011; + BoardSection body = 3029410; + // Requested fields. 
+ string fields = 318677073; +} + +message UpdateBoardSectionResponse { + // Board section + BoardSection result = 1; +} + +message DeleteBoardSectionRequest { + // Id of board section + int64 board_section_id = 501228011; +} + +message DeleteBoardSectionResponse { + // Successfully deleted. + string result = 1; +} + +message AllColorCollectionsRequest { + // Requested fields. + string fields = 318677073; +} + +message AllColorCollectionsResponse { + // ColorCollections + repeated ColorCollection result = 1; +} + +message AllColorCollectionsStreamResponse { + // ColorCollections + ColorCollection result = 1; +} + +message CreateColorCollectionRequest { + ColorCollection body = 3029410; +} + +message CreateColorCollectionResponse { + // ColorCollection + ColorCollection result = 1; +} + +message ColorCollectionsCustomRequest { + // Requested fields. + string fields = 318677073; +} + +message ColorCollectionsCustomResponse { + // ColorCollections + repeated ColorCollection result = 1; +} + +message ColorCollectionsCustomStreamResponse { + // ColorCollections + ColorCollection result = 1; +} + +message ColorCollectionsStandardRequest { + // Requested fields. + string fields = 318677073; +} + +message ColorCollectionsStandardResponse { + // ColorCollections + repeated ColorCollection result = 1; +} + +message ColorCollectionsStandardStreamResponse { + // ColorCollections + ColorCollection result = 1; +} + +message DefaultColorCollectionRequest { + +} + +message DefaultColorCollectionResponse { + // ColorCollection + ColorCollection result = 1; +} + +message SetDefaultColorCollectionRequest { + // ID of color collection to set as default + string collection_id = 410621138; +} + +message SetDefaultColorCollectionResponse { + // ColorCollection + ColorCollection result = 1; +} + +message ColorCollectionRequest { + // Id of Color Collection + string collection_id = 410621138; + // Requested fields. + string fields = 318677073; +} + +message ColorCollectionResponse { + // ColorCollection + ColorCollection result = 1; +} + +message UpdateColorCollectionRequest { + // Id of Custom Color Collection + string collection_id = 410621138; + ColorCollection body = 3029410; +} + +message UpdateColorCollectionResponse { + // ColorCollection + ColorCollection result = 1; +} + +message DeleteColorCollectionRequest { + // Id of Color Collection + string collection_id = 410621138; +} + +message DeleteColorCollectionResponse { + // Successfully deleted. + string result = 1; +} + +message GetAllCommandsRequest { + // Id of the associated content. This must be accompanied with content_type. + string content_id = 264552097; + // Type of the associated content. This must be accompanied with content_id. + string content_type = 415923104; + // Number of results to return. + int64 limit = 102976443; +} + +message GetAllCommandsResponse { + // Commands + repeated Command result = 1; +} + +message GetAllCommandsStreamResponse { + // Commands + Command result = 1; +} + +message CreateCommandRequest { + Command body = 3029410; +} + +message CreateCommandResponse { + // The command is saved. + Command result = 1; +} + +message UpdateCommandRequest { + // ID of a command + int64 command_id = 392078663; + UpdateCommand body = 3029410; +} + +message UpdateCommandResponse { + // The command is updated. + Command result = 1; +} + +message DeleteCommandRequest { + // ID of a command + int64 command_id = 392078663; +} + +message DeleteCommandResponse { + // The command is deleted. 
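+ // Note: unlike the other Delete*Response messages in this file, which return
+ // a string result ("Successfully deleted."), this response has no fields.
+ // As with the other *Request/*Response pairs here, it is presumably consumed
+ // by a matching service rpc, e.g.:
+ //   rpc DeleteCommand(DeleteCommandRequest) returns (DeleteCommandResponse);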
+} + +message CloudStorageConfigurationRequest { + +} + +message CloudStorageConfigurationResponse { + // Current Cloud Storage Configuration + BackupConfiguration result = 1; +} + +message UpdateCloudStorageConfigurationRequest { + BackupConfiguration body = 3029410; +} + +message UpdateCloudStorageConfigurationResponse { + // New state for specified model set. + BackupConfiguration result = 1; +} + +message CustomWelcomeEmailRequest { + +} + +message CustomWelcomeEmailResponse { + // Custom Welcome Email + CustomWelcomeEmail result = 1; +} + +message UpdateCustomWelcomeEmailRequest { + CustomWelcomeEmail body = 3029410; + // If true a test email with the content from the request will be sent to the current user after saving + bool send_test_welcome_email = 416442261; +} + +message UpdateCustomWelcomeEmailResponse { + // Custom Welcome Email + CustomWelcomeEmail result = 1; +} + +message UpdateCustomWelcomeEmailTestRequest { + WelcomeEmailTest body = 3029410; +} + +message UpdateCustomWelcomeEmailTestResponse { + // Send Test Welcome Email + WelcomeEmailTest result = 1; +} + +message DigestEmailsEnabledRequest { + +} + +message DigestEmailsEnabledResponse { + // Digest_emails + DigestEmails result = 1; +} + +message UpdateDigestEmailsEnabledRequest { + DigestEmails body = 3029410; +} + +message UpdateDigestEmailsEnabledResponse { + // Digest_emails + DigestEmails result = 1; +} + +message CreateDigestEmailSendRequest { + +} + +message CreateDigestEmailSendResponse { + // Status of generating and sending the data + DigestEmailSend result = 1; +} + +message InternalHelpResourcesContentRequest { + +} + +message InternalHelpResourcesContentResponse { + // Internal Help Resources Content + InternalHelpResourcesContent result = 1; +} + +message UpdateInternalHelpResourcesContentRequest { + InternalHelpResourcesContent body = 3029410; +} + +message UpdateInternalHelpResourcesContentResponse { + // Internal Help Resources Content + InternalHelpResourcesContent result = 1; +} + +message InternalHelpResourcesRequest { + +} + +message InternalHelpResourcesResponse { + // Internal Help Resources + InternalHelpResources result = 1; +} + +message UpdateInternalHelpResourcesRequest { + InternalHelpResources body = 3029410; +} + +message UpdateInternalHelpResourcesResponse { + // Custom Welcome Email + InternalHelpResources result = 1; +} + +message AllLegacyFeaturesRequest { + +} + +message AllLegacyFeaturesResponse { + // Legacy Feature + repeated LegacyFeature result = 1; +} + +message AllLegacyFeaturesStreamResponse { + // Legacy Feature + LegacyFeature result = 1; +} + +message LegacyFeatureRequest { + // id of legacy feature + string legacy_feature_id = 318344547; +} + +message LegacyFeatureResponse { + // Legacy Feature + LegacyFeature result = 1; +} + +message UpdateLegacyFeatureRequest { + // id of legacy feature + string legacy_feature_id = 318344547; + LegacyFeature body = 3029410; +} + +message UpdateLegacyFeatureResponse { + // Legacy Feature + LegacyFeature result = 1; +} + +message AllLocalesRequest { + +} + +message AllLocalesResponse { + // Locale + repeated Locale result = 1; +} + +message AllLocalesStreamResponse { + // Locale + Locale result = 1; +} + +message AllTimezonesRequest { + +} + +message AllTimezonesResponse { + // Timezone + repeated Timezone result = 1; +} + +message AllTimezonesStreamResponse { + // Timezone + Timezone result = 1; +} + +message VersionsRequest { + // Requested fields. 
+ string fields = 318677073; +} + +message VersionsResponse { + // ApiVersion + ApiVersion result = 1; +} + +message WhitelabelConfigurationRequest { + // Requested fields. + string fields = 318677073; +} + +message WhitelabelConfigurationResponse { + // Whitelabel configuration + WhitelabelConfiguration result = 1; +} + +message UpdateWhitelabelConfigurationRequest { + WhitelabelConfiguration body = 3029410; +} + +message UpdateWhitelabelConfigurationResponse { + // Whitelabel configuration + WhitelabelConfiguration result = 1; +} + +message AllConnectionsRequest { + // Requested fields. + string fields = 318677073; +} + +message AllConnectionsResponse { + // Connection + repeated DBConnection result = 1; +} + +message AllConnectionsStreamResponse { + // Connection + DBConnection result = 1; +} + +message CreateConnectionRequest { + DBConnection body = 3029410; +} + +message CreateConnectionResponse { + // Connection + DBConnection result = 1; +} + +message ConnectionRequest { + // Name of connection + string connection_name = 365832102; + // Requested fields. + string fields = 318677073; +} + +message ConnectionResponse { + // Connection + DBConnection result = 1; +} + +message UpdateConnectionRequest { + // Name of connection + string connection_name = 365832102; + DBConnection body = 3029410; +} + +message UpdateConnectionResponse { + // Connection + DBConnection result = 1; +} + +message DeleteConnectionRequest { + // Name of connection + string connection_name = 365832102; +} + +message DeleteConnectionResponse { + // Successfully deleted. + string result = 1; +} + +message DeleteConnectionOverrideRequest { + // Name of connection + string connection_name = 365832102; + // Context of connection override + string override_context = 284763377; +} + +message DeleteConnectionOverrideResponse { + // Successfully deleted. + string result = 1; +} + +message TestConnectionRequest { + // Name of connection + string connection_name = 365832102; + // Array of names of tests to run + string tests = 110251553; +} + +message TestConnectionResponse { + // Test results + repeated DBConnectionTestResult result = 1; +} + +message TestConnectionStreamResponse { + // Test results + DBConnectionTestResult result = 1; +} + +message TestConnectionConfigRequest { + DBConnection body = 3029410; + // Array of names of tests to run + string tests = 110251553; +} + +message TestConnectionConfigResponse { + // Test results + repeated DBConnectionTestResult result = 1; +} + +message TestConnectionConfigStreamResponse { + // Test results + DBConnectionTestResult result = 1; +} + +message AllDialectInfosRequest { + // Requested fields. + string fields = 318677073; +} + +message AllDialectInfosResponse { + // Dialect Info + repeated DialectInfo result = 1; +} + +message AllDialectInfosStreamResponse { + // Dialect Info + DialectInfo result = 1; +} + +message AllExternalOauthApplicationsRequest { + // Application name + string name = 3373707; + // Application Client ID + string client_id = 476022396; +} + +message AllExternalOauthApplicationsResponse { + // External OAuth Application. This is an OAuth Application which Looker uses to access external systems. + repeated ExternalOauthApplication result = 1; +} + +message AllExternalOauthApplicationsStreamResponse { + // External OAuth Application. This is an OAuth Application which Looker uses to access external systems. 
+ ExternalOauthApplication result = 1; +} + +message CreateExternalOauthApplicationRequest { + ExternalOauthApplication body = 3029410; +} + +message CreateExternalOauthApplicationResponse { + // External OAuth Application. This is an OAuth Application which Looker uses to access external systems. + ExternalOauthApplication result = 1; +} + +message AllSshServersRequest { + // Requested fields. + string fields = 318677073; +} + +message AllSshServersResponse { + // SSH Server + repeated SshServer result = 1; +} + +message AllSshServersStreamResponse { + // SSH Server + SshServer result = 1; +} + +message CreateSshServerRequest { + SshServer body = 3029410; +} + +message CreateSshServerResponse { + // SSH Server + SshServer result = 1; +} + +message SshServerRequest { + // Id of SSH Server + string ssh_server_id = 326923896; +} + +message SshServerResponse { + // SSH Server + SshServer result = 1; +} + +message UpdateSshServerRequest { + // Id of SSH Server + string ssh_server_id = 326923896; + SshServer body = 3029410; +} + +message UpdateSshServerResponse { + // SSH Server + SshServer result = 1; +} + +message DeleteSshServerRequest { + // Id of SSH Server + string ssh_server_id = 326923896; +} + +message DeleteSshServerResponse { + // Successfully deleted. + string result = 1; +} + +message TestSshServerRequest { + // Id of SSH Server + string ssh_server_id = 326923896; +} + +message TestSshServerResponse { + // Test SSH Server + SshServer result = 1; +} + +message AllSshTunnelsRequest { + // Requested fields. + string fields = 318677073; +} + +message AllSshTunnelsResponse { + // SSH Tunnel + repeated SshTunnel result = 1; +} + +message AllSshTunnelsStreamResponse { + // SSH Tunnel + SshTunnel result = 1; +} + +message CreateSshTunnelRequest { + SshTunnel body = 3029410; +} + +message CreateSshTunnelResponse { + // SSH Tunnel + SshTunnel result = 1; +} + +message SshTunnelRequest { + // Id of SSH Tunnel + string ssh_tunnel_id = 484247845; +} + +message SshTunnelResponse { + // SSH Tunnel + SshTunnel result = 1; +} + +message UpdateSshTunnelRequest { + // Id of SSH Tunnel + string ssh_tunnel_id = 484247845; + SshTunnel body = 3029410; +} + +message UpdateSshTunnelResponse { + // SSH Tunnel + SshTunnel result = 1; +} + +message DeleteSshTunnelRequest { + // Id of SSH Tunnel + string ssh_tunnel_id = 484247845; +} + +message DeleteSshTunnelResponse { + // Successfully deleted. + string result = 1; +} + +message TestSshTunnelRequest { + // Id of SSH Tunnel + string ssh_tunnel_id = 484247845; +} + +message TestSshTunnelResponse { + // Test SSH Tunnel + SshTunnel result = 1; +} + +message SshPublicKeyRequest { + +} + +message SshPublicKeyResponse { + // SSH Public Key + SshPublicKey result = 1; +} + +message SearchContentFavoritesRequest { + // Match content favorite id(s) + int64 id = 3355; + // Match user id(s).To create a list of multiple ids, use commas as separators + string user_id = 147132913; + // Match content metadata id(s).To create a list of multiple ids, use commas as separators + string content_metadata_id = 293222822; + // Match dashboard id(s).To create a list of multiple ids, use commas as separators + string dashboard_id = 496187565; + // Match look id(s).To create a list of multiple ids, use commas as separators + string look_id = 349778619; + // Match board id(s).To create a list of multiple ids, use commas as separators + string board_id = 402691141; + // Number of results to return. 
(used with offset) + int64 limit = 102976443; + // Number of results to skip before returning any. (used with limit) + int64 offset = 509889974; + // Fields to sort by. + string sorts = 109624981; + // Requested fields. + string fields = 318677073; + // Combine given search criteria in a boolean OR expression + bool filter_or = 440686075; +} + +message SearchContentFavoritesResponse { + // Favorite Content + repeated ContentFavorite result = 1; +} + +message SearchContentFavoritesStreamResponse { + // Favorite Content + ContentFavorite result = 1; +} + +message ContentFavoriteRequest { + // Id of favorite content + int64 content_favorite_id = 403544586; + // Requested fields. + string fields = 318677073; +} + +message ContentFavoriteResponse { + // Favorite Content + ContentFavorite result = 1; +} + +message DeleteContentFavoriteRequest { + // Id of favorite content + int64 content_favorite_id = 403544586; +} + +message DeleteContentFavoriteResponse { + // Successfully deleted. + string result = 1; +} + +message CreateContentFavoriteRequest { + ContentFavorite body = 3029410; +} + +message CreateContentFavoriteResponse { + // Favorite Content + ContentFavorite result = 1; +} + +message AllContentMetadatasRequest { + // Parent space of content. + int64 parent_id = 517581876; + // Requested fields. + string fields = 318677073; +} + +message AllContentMetadatasResponse { + // Content Metadata + repeated ContentMeta result = 1; +} + +message AllContentMetadatasStreamResponse { + // Content Metadata + ContentMeta result = 1; +} + +message ContentMetadataRequest { + // Id of content metadata + int64 content_metadata_id = 293222822; + // Requested fields. + string fields = 318677073; +} + +message ContentMetadataResponse { + // Content Metadata + ContentMeta result = 1; +} + +message UpdateContentMetadataRequest { + // Id of content metadata + int64 content_metadata_id = 293222822; + ContentMeta body = 3029410; +} + +message UpdateContentMetadataResponse { + // Content Metadata + ContentMeta result = 1; +} + +message AllContentMetadataAccessesRequest { + // Id of content metadata + int64 content_metadata_id = 293222822; + // Requested fields. + string fields = 318677073; +} + +message AllContentMetadataAccessesResponse { + // Content Metadata Access + repeated ContentMetaGroupUser result = 1; +} + +message AllContentMetadataAccessesStreamResponse { + // Content Metadata Access + ContentMetaGroupUser result = 1; +} + +message CreateContentMetadataAccessRequest { + ContentMetaGroupUser body = 3029410; + // Optionally sends notification email when granting access to a board. + bool send_boards_notification_email = 49054589; +} + +message CreateContentMetadataAccessResponse { + // Content Metadata Access + ContentMetaGroupUser result = 1; +} + +message UpdateContentMetadataAccessRequest { + // Id of content metadata access + string content_metadata_access_id = 311738150; + ContentMetaGroupUser body = 3029410; +} + +message UpdateContentMetadataAccessResponse { + // Content Metadata Access + ContentMetaGroupUser result = 1; +} + +message DeleteContentMetadataAccessRequest { + // Id of content metadata access + int64 content_metadata_access_id = 311738150; +} + +message DeleteContentMetadataAccessResponse { + // Successfully deleted. 
+ string result = 1; +} + +message ContentThumbnailRequest { + // Either dashboard or look + string type = 3575610; + // ID of the dashboard or look to render + string resource_id = 308634299; + // Whether or not to refresh the rendered image with the latest content + string reload = 467320627; + // A value of png produces a thumbnail in PNG format instead of SVG (default) + string format = 317194754; + // The width of the image if format is supplied + int64 width = 113126854; + // The height of the image if format is supplied + int64 height = 305257398; +} + +message ContentThumbnailResponse { + +} + +message ContentValidationRequest { + // Requested fields. + string fields = 318677073; +} + +message ContentValidationResponse { + // Content validation results + ContentValidation result = 1; +} + +message SearchContentViewsRequest { + // Match view count + string view_count = 383588418; + // Match Group Id + string group_id = 506361563; + // Match look_id + string look_id = 349778619; + // Match dashboard_id + string dashboard_id = 496187565; + // Match content metadata id + string content_metadata_id = 293222822; + // Match start of week date (format is "YYYY-MM-DD") + string start_of_week_date = 75308398; + // True if only all time view records should be returned + bool all_time = 449567658; + // Match user id + string user_id = 147132913; + // Requested fields + string fields = 318677073; + // Number of results to return. Use with `offset` to manage pagination of results + int64 limit = 102976443; + // Number of results to skip before returning data + int64 offset = 509889974; + // Fields to sort by + string sorts = 109624981; + // Combine given search criteria in a boolean OR expression + bool filter_or = 440686075; +} + +message SearchContentViewsResponse { + // Content View + repeated ContentView result = 1; +} + +message SearchContentViewsStreamResponse { + // Content View + ContentView result = 1; +} + +message VectorThumbnailRequest { + // Either dashboard or look + string type = 3575610; + // ID of the dashboard or look to render + string resource_id = 308634299; + // Whether or not to refresh the rendered image with the latest content + string reload = 467320627; +} + +message VectorThumbnailResponse { + // Vector thumbnail + string result = 1; +} + +message AllDashboardsRequest { + // Requested fields. + string fields = 318677073; +} + +message AllDashboardsResponse { + // dashboards + repeated DashboardBase result = 1; +} + +message AllDashboardsStreamResponse { + // dashboards + DashboardBase result = 1; +} + +message CreateDashboardRequest { + Dashboard body = 3029410; +} + +message CreateDashboardResponse { + // Dashboard + Dashboard result = 1; +} + +message SearchDashboardsRequest { + // Match dashboard id. + string id = 3355; + // Match dashboard slug. + string slug = 3533483; + // Match Dashboard title. + string title = 110371416; + // Match Dashboard description. + string description = 431136513; + // Filter on a content favorite id. + string content_favorite_id = 403544586; + // Filter on a particular space. + string folder_id = 527488652; + // Filter on dashboards deleted status. + string deleted = 387615750; + // Filter on dashboards created by a particular user. + string user_id = 147132913; + // Filter on a particular value of view_count + string view_count = 383588418; + // Filter on a content favorite id. 
+ string content_metadata_id = 293222822; + // Exclude items that exist only in personal spaces other than the users + bool curate = 337284075; + // Select dashboards based on when they were last viewed + string last_viewed_at = 273134349; + // Requested fields. + string fields = 318677073; + // Requested page. + int64 page = 3433103; + // Results per page. + int64 per_page = 424711281; + // Number of results to return. (used with offset and takes priority over page and per_page) + int64 limit = 102976443; + // Number of results to skip before returning any. (used with limit and takes priority over page and per_page) + int64 offset = 509889974; + // One or more fields to sort by. Sortable fields: [:title, :user_id, :id, :created_at, :space_id, :folder_id, :description, :view_count, :favorite_count, :slug, :content_favorite_id, :content_metadata_id, :deleted, :deleted_at, :last_viewed_at, :last_accessed_at] + string sorts = 109624981; + // Combine given search criteria in a boolean OR expression + bool filter_or = 440686075; +} + +message SearchDashboardsResponse { + // dashboards + repeated Dashboard result = 1; +} + +message SearchDashboardsStreamResponse { + // dashboards + Dashboard result = 1; +} + +message ImportLookmlDashboardRequest { + // Id of LookML dashboard + string lookml_dashboard_id = 339184839; + // Id of space to import the dashboard to + string space_id = 511862461; + Dashboard body = 3029410; + // If true, and this dashboard is localized, export it with the raw keys, not localized. + bool raw_locale = 401271403; +} + +message ImportLookmlDashboardResponse { + // Dashboard + Dashboard result = 1; +} + +message SyncLookmlDashboardRequest { + // Id of LookML dashboard, in the form 'model::dashboardname' + string lookml_dashboard_id = 339184839; + Dashboard body = 3029410; + // If true, and this dashboard is localized, export it with the raw keys, not localized. + bool raw_locale = 401271403; +} + +message SyncLookmlDashboardResponse { + // Ids of all the dashboards that were updated by this operation + repeated int64 result = 1; +} + +message SyncLookmlDashboardStreamResponse { + // Ids of all the dashboards that were updated by this operation + int64 result = 1; +} + +message DashboardRequest { + // Id of dashboard + string dashboard_id = 496187565; + // Requested fields. + string fields = 318677073; +} + +message DashboardResponse { + // Dashboard + Dashboard result = 1; +} + +message UpdateDashboardRequest { + // Id of dashboard + string dashboard_id = 496187565; + Dashboard body = 3029410; +} + +message UpdateDashboardResponse { + // Dashboard + Dashboard result = 1; +} + +message DeleteDashboardRequest { + // Id of dashboard + string dashboard_id = 496187565; +} + +message DeleteDashboardResponse { + // Successfully deleted. 
+ string result = 1; +} + +message DashboardAggregateTableLookmlRequest { + // Id of dashboard + string dashboard_id = 496187565; +} + +message DashboardAggregateTableLookmlResponse { + // JSON for Aggregate Table LookML + DashboardAggregateTableLookml result = 1; +} + +message DashboardLookmlRequest { + // Id of dashboard + string dashboard_id = 496187565; +} + +message DashboardLookmlResponse { + // json of dashboard + DashboardLookml result = 1; +} + +message SearchDashboardElementsRequest { + // Select elements that refer to a given dashboard id + int64 dashboard_id = 496187565; + // Select elements that refer to a given look id + int64 look_id = 349778619; + // Match the title of element + string title = 110371416; + // Select soft-deleted dashboard elements + bool deleted = 387615750; + // Requested fields. + string fields = 318677073; + // Combine given search criteria in a boolean OR expression + bool filter_or = 440686075; + // Fields to sort by. Sortable fields: [:look_id, :dashboard_id, :deleted, :title] + string sorts = 109624981; +} + +message SearchDashboardElementsResponse { + // Dashboard elements + repeated DashboardElement result = 1; +} + +message SearchDashboardElementsStreamResponse { + // Dashboard elements + DashboardElement result = 1; +} + +message DashboardElementRequest { + // Id of dashboard element + string dashboard_element_id = 79370295; + // Requested fields. + string fields = 318677073; +} + +message DashboardElementResponse { + // DashboardElement + DashboardElement result = 1; +} + +message UpdateDashboardElementRequest { + // Id of dashboard element + string dashboard_element_id = 79370295; + DashboardElement body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message UpdateDashboardElementResponse { + // DashboardElement + DashboardElement result = 1; +} + +message DeleteDashboardElementRequest { + // Id of dashboard element + string dashboard_element_id = 79370295; +} + +message DeleteDashboardElementResponse { + // Successfully deleted. + string result = 1; +} + +message DashboardDashboardElementsRequest { + // Id of dashboard + string dashboard_id = 496187565; + // Requested fields. + string fields = 318677073; +} + +message DashboardDashboardElementsResponse { + // DashboardElement + repeated DashboardElement result = 1; +} + +message DashboardDashboardElementsStreamResponse { + // DashboardElement + DashboardElement result = 1; +} + +message CreateDashboardElementRequest { + DashboardElement body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message CreateDashboardElementResponse { + // DashboardElement + DashboardElement result = 1; +} + +message DashboardFilterRequest { + // Id of dashboard filters + string dashboard_filter_id = 467108821; + // Requested fields. + string fields = 318677073; +} + +message DashboardFilterResponse { + // Dashboard Filter + DashboardFilter result = 1; +} + +message UpdateDashboardFilterRequest { + // Id of dashboard filter + string dashboard_filter_id = 467108821; + DashboardFilter body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message UpdateDashboardFilterResponse { + // Dashboard Filter + DashboardFilter result = 1; +} + +message DeleteDashboardFilterRequest { + // Id of dashboard filter + string dashboard_filter_id = 467108821; +} + +message DeleteDashboardFilterResponse { + // Successfully deleted. 
+ string result = 1; +} + +message DashboardDashboardFiltersRequest { + // Id of dashboard + string dashboard_id = 496187565; + // Requested fields. + string fields = 318677073; +} + +message DashboardDashboardFiltersResponse { + // Dashboard Filter + repeated DashboardFilter result = 1; +} + +message DashboardDashboardFiltersStreamResponse { + // Dashboard Filter + DashboardFilter result = 1; +} + +message CreateDashboardFilterRequest { + CreateDashboardFilter body = 3029410; + // Requested fields + string fields = 318677073; +} + +message CreateDashboardFilterResponse { + // Dashboard Filter + DashboardFilter result = 1; +} + +message DashboardLayoutComponentRequest { + // Id of dashboard layout component + string dashboard_layout_component_id = 391026662; + // Requested fields. + string fields = 318677073; +} + +message DashboardLayoutComponentResponse { + // DashboardLayoutComponent + DashboardLayoutComponent result = 1; +} + +message UpdateDashboardLayoutComponentRequest { + // Id of dashboard layout component + string dashboard_layout_component_id = 391026662; + DashboardLayoutComponent body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message UpdateDashboardLayoutComponentResponse { + // DashboardLayoutComponent + DashboardLayoutComponent result = 1; +} + +message DashboardLayoutDashboardLayoutComponentsRequest { + // Id of dashboard layout component + string dashboard_layout_id = 343841433; + // Requested fields. + string fields = 318677073; +} + +message DashboardLayoutDashboardLayoutComponentsResponse { + // DashboardLayoutComponent + repeated DashboardLayoutComponent result = 1; +} + +message DashboardLayoutDashboardLayoutComponentsStreamResponse { + // DashboardLayoutComponent + DashboardLayoutComponent result = 1; +} + +message DashboardLayoutRequest { + // Id of dashboard layouts + string dashboard_layout_id = 343841433; + // Requested fields. + string fields = 318677073; +} + +message DashboardLayoutResponse { + // DashboardLayout + DashboardLayout result = 1; +} + +message UpdateDashboardLayoutRequest { + // Id of dashboard layout + string dashboard_layout_id = 343841433; + DashboardLayout body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message UpdateDashboardLayoutResponse { + // DashboardLayout + DashboardLayout result = 1; +} + +message DeleteDashboardLayoutRequest { + // Id of dashboard layout + string dashboard_layout_id = 343841433; +} + +message DeleteDashboardLayoutResponse { + // Successfully deleted. + string result = 1; +} + +message DashboardDashboardLayoutsRequest { + // Id of dashboard + string dashboard_id = 496187565; + // Requested fields. + string fields = 318677073; +} + +message DashboardDashboardLayoutsResponse { + // DashboardLayout + repeated DashboardLayout result = 1; +} + +message DashboardDashboardLayoutsStreamResponse { + // DashboardLayout + DashboardLayout result = 1; +} + +message CreateDashboardLayoutRequest { + DashboardLayout body = 3029410; + // Requested fields. 
+ string fields = 318677073; +} + +message CreateDashboardLayoutResponse { + // DashboardLayout + DashboardLayout result = 1; +} + +message PerformDataActionRequest { + DataActionRequest body = 3029410; +} + +message PerformDataActionResponse { + // Data Action Response + DataActionResponse result = 1; +} + +message FetchRemoteDataActionFormRequest { + map body = 3029410; +} + +message FetchRemoteDataActionFormResponse { + // Data Action Form + DataActionForm result = 1; +} + +message AllDatagroupsRequest { + +} + +message AllDatagroupsResponse { + // Datagroup + repeated Datagroup result = 1; +} + +message AllDatagroupsStreamResponse { + // Datagroup + Datagroup result = 1; +} + +message DatagroupRequest { + // ID of datagroup. + int64 datagroup_id = 442238753; +} + +message DatagroupResponse { + // Datagroup + Datagroup result = 1; +} + +message UpdateDatagroupRequest { + // ID of datagroup. + int64 datagroup_id = 442238753; + Datagroup body = 3029410; +} + +message UpdateDatagroupResponse { + // Datagroup + Datagroup result = 1; +} + +message SearchFoldersRequest { + // Requested fields. + string fields = 318677073; + // Requested page. + int64 page = 3433103; + // Results per page. + int64 per_page = 424711281; + // Number of results to return. (used with offset and takes priority over page and per_page) + int64 limit = 102976443; + // Number of results to skip before returning any. (used with limit and takes priority over page and per_page) + int64 offset = 509889974; + // Fields to sort by. + string sorts = 109624981; + // Match Space title. + string name = 3373707; + // Match Space id + int64 id = 3355; + // Filter on a children of a particular folder. + string parent_id = 517581876; + // Filter on folder created by a particular user. + string creator_id = 344833155; + // Combine given search criteria in a boolean OR expression + bool filter_or = 440686075; +} + +message SearchFoldersResponse { + // folders + repeated Folder result = 1; +} + +message SearchFoldersStreamResponse { + // folders + Folder result = 1; +} + +message FolderRequest { + // Id of folder + string folder_id = 527488652; + // Requested fields. + string fields = 318677073; +} + +message FolderResponse { + // Folder + Folder result = 1; +} + +message UpdateFolderRequest { + // Id of folder + string folder_id = 527488652; + UpdateFolder body = 3029410; +} + +message UpdateFolderResponse { + // Folder + Folder result = 1; +} + +message DeleteFolderRequest { + // Id of folder + string folder_id = 527488652; +} + +message DeleteFolderResponse { + // Successfully deleted. + string result = 1; +} + +message AllFoldersRequest { + // Requested fields. + string fields = 318677073; +} + +message AllFoldersResponse { + // Folder + repeated Folder result = 1; +} + +message AllFoldersStreamResponse { + // Folder + Folder result = 1; +} + +message CreateFolderRequest { + CreateFolder body = 3029410; +} + +message CreateFolderResponse { + // Folder + Folder result = 1; +} + +message FolderChildrenRequest { + // Id of folder + string folder_id = 527488652; + // Requested fields. + string fields = 318677073; + // Requested page. + int64 page = 3433103; + // Results per page. + int64 per_page = 424711281; + // Fields to sort by. + string sorts = 109624981; +} + +message FolderChildrenResponse { + // Folders + repeated Folder result = 1; +} + +message FolderChildrenStreamResponse { + // Folders + Folder result = 1; +} + +message FolderChildrenSearchRequest { + // Id of folder + string folder_id = 527488652; + // Requested fields. 
+ string fields = 318677073; + // Fields to sort by. + string sorts = 109624981; + // Match folder name. + string name = 3373707; +} + +message FolderChildrenSearchResponse { + // Folders + repeated Folder result = 1; +} + +message FolderChildrenSearchStreamResponse { + // Folders + Folder result = 1; +} + +message FolderParentRequest { + // Id of folder + string folder_id = 527488652; + // Requested fields. + string fields = 318677073; +} + +message FolderParentResponse { + // Folder + Folder result = 1; +} + +message FolderAncestorsRequest { + // Id of folder + string folder_id = 527488652; + // Requested fields. + string fields = 318677073; +} + +message FolderAncestorsResponse { + // Folders + repeated Folder result = 1; +} + +message FolderAncestorsStreamResponse { + // Folders + Folder result = 1; +} + +message FolderLooksRequest { + // Id of folder + string folder_id = 527488652; + // Requested fields. + string fields = 318677073; +} + +message FolderLooksResponse { + // Looks + repeated LookWithQuery result = 1; +} + +message FolderLooksStreamResponse { + // Looks + LookWithQuery result = 1; +} + +message FolderDashboardsRequest { + // Id of folder + string folder_id = 527488652; + // Requested fields. + string fields = 318677073; +} + +message FolderDashboardsResponse { + // Dashboard + repeated Dashboard result = 1; +} + +message FolderDashboardsStreamResponse { + // Dashboard + Dashboard result = 1; +} + +message AllGroupsRequest { + // Requested fields. + string fields = 318677073; + // Requested page. + int64 page = 3433103; + // Results per page. + int64 per_page = 424711281; + // Fields to sort by. + string sorts = 109624981; + // Optional of ids to get specific groups. + string ids = 104120; + // Id of content metadata to which groups must have access. + int64 content_metadata_id = 293222822; + // Select only groups that either can/cannot be given access to content. + bool can_add_to_content_metadata = 97036652; +} + +message AllGroupsResponse { + // Group + repeated Group result = 1; +} + +message AllGroupsStreamResponse { + // Group + Group result = 1; +} + +message CreateGroupRequest { + Group body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message CreateGroupResponse { + // Group + Group result = 1; +} + +message SearchGroupsRequest { + // Requested fields. + string fields = 318677073; + // Number of results to return (used with `offset`). + int64 limit = 102976443; + // Number of results to skip before returning any (used with `limit`). + int64 offset = 509889974; + // Fields to sort by. + string sorts = 109624981; + // Combine given search criteria in a boolean OR expression + bool filter_or = 440686075; + // Match group id. + int64 id = 3355; + // Match group name. + string name = 3373707; + // Match group external_group_id. + string external_group_id = 515008972; + // Match group externally_managed. + bool externally_managed = 317189570; + // Match group externally_orphaned. + bool externally_orphaned = 390033015; +} + +message SearchGroupsResponse { + // Group + repeated Group result = 1; +} + +message SearchGroupsStreamResponse { + // Group + Group result = 1; +} + +message SearchGroupsWithRolesRequest { + // Requested fields. + string fields = 318677073; + // Number of results to return (used with `offset`). + int64 limit = 102976443; + // Number of results to skip before returning any (used with `limit`). + int64 offset = 509889974; + // Fields to sort by. 
+ string sorts = 109624981; + // Combine given search criteria in a boolean OR expression + bool filter_or = 440686075; + // Match group id. + int64 id = 3355; + // Match group name. + string name = 3373707; + // Match group external_group_id. + string external_group_id = 515008972; + // Match group externally_managed. + bool externally_managed = 317189570; + // Match group externally_orphaned. + bool externally_orphaned = 390033015; +} + +message SearchGroupsWithRolesResponse { + // Group + repeated GroupSearch result = 1; +} + +message SearchGroupsWithRolesStreamResponse { + // Group + GroupSearch result = 1; +} + +message SearchGroupsWithHierarchyRequest { + // Requested fields. + string fields = 318677073; + // Number of results to return (used with `offset`). + int64 limit = 102976443; + // Number of results to skip before returning any (used with `limit`). + int64 offset = 509889974; + // Fields to sort by. + string sorts = 109624981; + // Combine given search criteria in a boolean OR expression + bool filter_or = 440686075; + // Match group id. + int64 id = 3355; + // Match group name. + string name = 3373707; + // Match group external_group_id. + string external_group_id = 515008972; + // Match group externally_managed. + bool externally_managed = 317189570; + // Match group externally_orphaned. + bool externally_orphaned = 390033015; +} + +message SearchGroupsWithHierarchyResponse { + // Group + repeated GroupHierarchy result = 1; +} + +message SearchGroupsWithHierarchyStreamResponse { + // Group + GroupHierarchy result = 1; +} + +message GroupRequest { + // Id of group + int64 group_id = 506361563; + // Requested fields. + string fields = 318677073; +} + +message GroupResponse { + // Group + Group result = 1; +} + +message UpdateGroupRequest { + // Id of group + int64 group_id = 506361563; + Group body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message UpdateGroupResponse { + // Group + Group result = 1; +} + +message DeleteGroupRequest { + // Id of group + int64 group_id = 506361563; +} + +message DeleteGroupResponse { + // Successfully deleted. + string result = 1; +} + +message AllGroupGroupsRequest { + // Id of group + int64 group_id = 506361563; + // Requested fields. + string fields = 318677073; +} + +message AllGroupGroupsResponse { + // All groups in group. + repeated Group result = 1; +} + +message AllGroupGroupsStreamResponse { + // All groups in group. + Group result = 1; +} + +message AddGroupGroupRequest { + // Id of group + int64 group_id = 506361563; + GroupIdForGroupInclusion body = 3029410; +} + +message AddGroupGroupResponse { + // Added group. + Group result = 1; +} + +message AllGroupUsersRequest { + // Id of group + int64 group_id = 506361563; + // Requested fields. + string fields = 318677073; + // Requested page. + int64 page = 3433103; + // Results per page. + int64 per_page = 424711281; + // Fields to sort by. + string sorts = 109624981; +} + +message AllGroupUsersResponse { + // All users in group. + repeated User result = 1; +} + +message AllGroupUsersStreamResponse { + // All users in group. + User result = 1; +} + +message AddGroupUserRequest { + // Id of group + int64 group_id = 506361563; + GroupIdForGroupUserInclusion body = 3029410; +} + +message AddGroupUserResponse { + // Added user. 
+ User result = 1; +} + +message DeleteGroupUserRequest { + // Id of group + int64 group_id = 506361563; + // Id of user to remove from group + int64 user_id = 147132913; +} + +message DeleteGroupUserResponse { + // User successfully removed from group +} + +message DeleteGroupFromGroupRequest { + // Id of group + int64 group_id = 506361563; + // Id of group to delete + int64 deleting_group_id = 340570313; +} + +message DeleteGroupFromGroupResponse { + // Group successfully deleted +} + +message UpdateUserAttributeGroupValueRequest { + // Id of group + int64 group_id = 506361563; + // Id of user attribute + int64 user_attribute_id = 534680475; + UserAttributeGroupValue body = 3029410; +} + +message UpdateUserAttributeGroupValueResponse { + // Group value object. + UserAttributeGroupValue result = 1; +} + +message DeleteUserAttributeGroupValueRequest { + // Id of group + int64 group_id = 506361563; + // Id of user attribute + int64 user_attribute_id = 534680475; +} + +message DeleteUserAttributeGroupValueResponse { + // Value successfully unset +} + +message AllPrimaryHomepageSectionsRequest { + // Requested fields. + string fields = 318677073; +} + +message AllPrimaryHomepageSectionsResponse { + // Primary homepage section + repeated HomepageSection result = 1; +} + +message AllPrimaryHomepageSectionsStreamResponse { + // Primary homepage section + HomepageSection result = 1; +} + +message AllIntegrationHubsRequest { + // Requested fields. + string fields = 318677073; +} + +message AllIntegrationHubsResponse { + // Integration Hub + repeated IntegrationHub result = 1; +} + +message AllIntegrationHubsStreamResponse { + // Integration Hub + IntegrationHub result = 1; +} + +message CreateIntegrationHubRequest { + IntegrationHub body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message CreateIntegrationHubResponse { + // Integration Hub + IntegrationHub result = 1; +} + +message IntegrationHubRequest { + // Id of Integration Hub + int64 integration_hub_id = 485110416; + // Requested fields. + string fields = 318677073; +} + +message IntegrationHubResponse { + // Integration Hub + IntegrationHub result = 1; +} + +message UpdateIntegrationHubRequest { + // Id of Integration Hub + int64 integration_hub_id = 485110416; + IntegrationHub body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message UpdateIntegrationHubResponse { + // Integration Hub + IntegrationHub result = 1; +} + +message DeleteIntegrationHubRequest { + // Id of integration_hub + int64 integration_hub_id = 485110416; +} + +message DeleteIntegrationHubResponse { + // Successfully deleted. + string result = 1; +} + +message AcceptIntegrationHubLegalAgreementRequest { + // Id of integration_hub + int64 integration_hub_id = 485110416; +} + +message AcceptIntegrationHubLegalAgreementResponse { + // Integration hub + IntegrationHub result = 1; +} + +message AllIntegrationsRequest { + // Requested fields. + string fields = 318677073; + // Filter to a specific provider + string integration_hub_id = 485110416; +} + +message AllIntegrationsResponse { + // Integration + repeated Integration result = 1; +} + +message AllIntegrationsStreamResponse { + // Integration + Integration result = 1; +} + +message IntegrationRequest { + // Id of integration + string integration_id = 297185574; + // Requested fields. 
+ string fields = 318677073; +} + +message IntegrationResponse { + // Integration + Integration result = 1; +} + +message UpdateIntegrationRequest { + // Id of integration + string integration_id = 297185574; + Integration body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message UpdateIntegrationResponse { + // Integration + Integration result = 1; +} + +message FetchIntegrationFormRequest { + // Id of integration + string integration_id = 297185574; + map body = 3029410; +} + +message FetchIntegrationFormResponse { + // Data Action Form + DataActionForm result = 1; +} + +message TestIntegrationRequest { + // Id of integration + string integration_id = 297185574; +} + +message TestIntegrationResponse { + // Test Result + IntegrationTestResult result = 1; +} + +message AllLooksRequest { + // Requested fields. + string fields = 318677073; +} + +message AllLooksResponse { + // Look + repeated Look result = 1; +} + +message AllLooksStreamResponse { + // Look + Look result = 1; +} + +message CreateLookRequest { + LookWithQuery body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message CreateLookResponse { + // Look + LookWithQuery result = 1; +} + +message SearchLooksRequest { + // Match look id. + string id = 3355; + // Match Look title. + string title = 110371416; + // Match Look description. + string description = 431136513; + // Select looks with a particular content favorite id + string content_favorite_id = 403544586; + // Select looks in a particular folder. + string folder_id = 527488652; + // Select looks created by a particular user. + string user_id = 147132913; + // Select looks with particular view_count value + string view_count = 383588418; + // Select soft-deleted looks + bool deleted = 387615750; + // Select looks that reference a particular query by query_id + int64 query_id = 291663619; + // Exclude items that exist only in personal spaces other than the users + bool curate = 337284075; + // Select looks based on when they were last viewed + string last_viewed_at = 273134349; + // Requested fields. + string fields = 318677073; + // Requested page. + int64 page = 3433103; + // Results per page. + int64 per_page = 424711281; + // Number of results to return. (used with offset and takes priority over page and per_page) + int64 limit = 102976443; + // Number of results to skip before returning any. (used with limit and takes priority over page and per_page) + int64 offset = 509889974; + // One or more fields to sort results by. Sortable fields: [:title, :user_id, :id, :created_at, :space_id, :folder_id, :description, :updated_at, :last_updater_id, :view_count, :favorite_count, :content_favorite_id, :deleted, :deleted_at, :last_viewed_at, :last_accessed_at, :query_id] + string sorts = 109624981; + // Combine given search criteria in a boolean OR expression + bool filter_or = 440686075; +} + +message SearchLooksResponse { + // looks + repeated Look result = 1; +} + +message SearchLooksStreamResponse { + // looks + Look result = 1; +} + +message LookRequest { + // Id of look + int64 look_id = 349778619; + // Requested fields. + string fields = 318677073; +} + +message LookResponse { + // Look + LookWithQuery result = 1; +} + +message UpdateLookRequest { + // Id of look + int64 look_id = 349778619; + LookWithQuery body = 3029410; + // Requested fields. 
+ string fields = 318677073; +} + +message UpdateLookResponse { + // Look + LookWithQuery result = 1; +} + +message DeleteLookRequest { + // Id of look + int64 look_id = 349778619; +} + +message DeleteLookResponse { + // Successfully deleted. + string result = 1; +} + +message RunLookRequest { + // Id of look + int64 look_id = 349778619; + // Format of result + string result_format = 283324265; + // Row limit (may override the limit in the saved query). + int64 limit = 102976443; + // Apply model-specified formatting to each result. + bool apply_formatting = 400669803; + // Apply visualization options to results. + bool apply_vis = 518830860; + // Get results from cache if available. + bool cache = 94416770; + // Render width for image formats. + int64 image_width = 443391367; + // Render height for image formats. + int64 image_height = 421050507; + // Generate drill links (only applicable to 'json_detail' format. + bool generate_drill_links = 88455413; + // Force use of production models even if the user is in development mode. + bool force_production = 378049187; + // Retrieve any results from cache even if the results have expired. + bool cache_only = 28936777; + // Prefix to use for drill links (url encoded). + string path_prefix = 525914619; + // Rebuild PDTS used in query. + bool rebuild_pdts = 256532759; + // Perform table calculations on query results + bool server_table_calcs = 334030724; +} + +message RunLookResponse { + +} + +message AllLookmlModelsRequest { + // Requested fields. + string fields = 318677073; +} + +message AllLookmlModelsResponse { + // LookML Model + repeated LookmlModel result = 1; +} + +message AllLookmlModelsStreamResponse { + // LookML Model + LookmlModel result = 1; +} + +message CreateLookmlModelRequest { + LookmlModel body = 3029410; +} + +message CreateLookmlModelResponse { + // LookML Model + LookmlModel result = 1; +} + +message LookmlModelRequest { + // Name of lookml model. + string lookml_model_name = 273434863; + // Requested fields. + string fields = 318677073; +} + +message LookmlModelResponse { + // LookML Model + LookmlModel result = 1; +} + +message UpdateLookmlModelRequest { + // Name of lookml model. + string lookml_model_name = 273434863; + LookmlModel body = 3029410; +} + +message UpdateLookmlModelResponse { + // LookML Model + LookmlModel result = 1; +} + +message DeleteLookmlModelRequest { + // Name of lookml model. + string lookml_model_name = 273434863; +} + +message DeleteLookmlModelResponse { + // Successfully deleted. + string result = 1; +} + +message LookmlModelExploreRequest { + // Name of lookml model. + string lookml_model_name = 273434863; + // Name of explore. + string explore_name = 485574866; + // Requested fields. 
+ string fields = 318677073; +} + +message LookmlModelExploreResponse { + // LookML Model Explore + LookmlModelExplore result = 1; +} + +message ModelFieldnameSuggestionsRequest { + // Name of model + string model_name = 526217848; + // Name of view + string view_name = 393300486; + // Name of field to use for suggestions + string field_name = 288329560; + // Search term + string term = 3556460; + // Suggestion filters + string filters = 427273730; +} + +message ModelFieldnameSuggestionsResponse { + // Model view field suggestions + ModelFieldSuggestions result = 1; +} + +message ConnectionDatabasesRequest { + // Name of connection + string connection_name = 365832102; +} + +message ConnectionDatabasesResponse { + // Database names + repeated string result = 1; +} + +message ConnectionDatabasesStreamResponse { + // Database names + string result = 1; +} + +message ConnectionFeaturesRequest { + // Name of connection + string connection_name = 365832102; + // Requested fields. + string fields = 318677073; +} + +message ConnectionFeaturesResponse { + // Connection features + ConnectionFeatures result = 1; +} + +message ConnectionSchemasRequest { + // Name of connection + string connection_name = 365832102; + // For dialects that support multiple databases, optionally identify which to use + string database = 447366238; + // True to use fetch from cache, false to load fresh + bool cache = 94416770; + // Requested fields. + string fields = 318677073; +} + +message ConnectionSchemasResponse { + // Schemas for connection + repeated Schema result = 1; +} + +message ConnectionSchemasStreamResponse { + // Schemas for connection + Schema result = 1; +} + +message ConnectionTablesRequest { + // Name of connection + string connection_name = 365832102; + // Optional. Name of database to use for the query, only if applicable + string database = 447366238; + // Optional. Return only tables for this schema + string schema_name = 505336523; + // True to fetch from cache, false to load fresh + bool cache = 94416770; + // Requested fields. + string fields = 318677073; +} + +message ConnectionTablesResponse { + // Schemas and tables for connection + repeated SchemaTables result = 1; +} + +message ConnectionTablesStreamResponse { + // Schemas and tables for connection + SchemaTables result = 1; +} + +message ConnectionColumnsRequest { + // Name of connection + string connection_name = 365832102; + // For dialects that support multiple databases, optionally identify which to use + string database = 447366238; + // Name of schema to use. + string schema_name = 505336523; + // True to fetch from cache, false to load fresh + bool cache = 94416770; + // limits the tables per schema returned + int64 table_limit = 299740165; + // only fetch columns for a given (comma-separated) list of tables + string table_names = 300544459; + // Requested fields. + string fields = 318677073; +} + +message ConnectionColumnsResponse { + // Columns schema for connection + repeated SchemaColumns result = 1; +} + +message ConnectionColumnsStreamResponse { + // Columns schema for connection + SchemaColumns result = 1; +} + +message ConnectionSearchColumnsRequest { + // Name of connection + string connection_name = 365832102; + // Column name to find + string column_name = 417463574; + // Requested fields. 
+ string fields = 318677073; +} + +message ConnectionSearchColumnsResponse { + // Column names matching search pattern + repeated ColumnSearch result = 1; +} + +message ConnectionSearchColumnsStreamResponse { + // Column names matching search pattern + ColumnSearch result = 1; +} + +message ConnectionCostEstimateRequest { + // Name of connection + string connection_name = 365832102; + CreateCostEstimate body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message ConnectionCostEstimateResponse { + // Connection cost estimates + CostEstimate result = 1; +} + +message LockAllRequest { + // Id of project + string project_id = 492492543; + // Requested fields + string fields = 318677073; +} + +message LockAllResponse { + +} + +message AllGitBranchesRequest { + // Project Id + string project_id = 492492543; +} + +message AllGitBranchesResponse { + // Git Branch + repeated GitBranch result = 1; +} + +message AllGitBranchesStreamResponse { + // Git Branch + GitBranch result = 1; +} + +message GitBranchRequest { + // Project Id + string project_id = 492492543; +} + +message GitBranchResponse { + // Git Branch + GitBranch result = 1; +} + +message UpdateGitBranchRequest { + // Project Id + string project_id = 492492543; + GitBranch body = 3029410; +} + +message UpdateGitBranchResponse { + // Git Branch + GitBranch result = 1; +} + +message CreateGitBranchRequest { + // Project Id + string project_id = 492492543; + GitBranch body = 3029410; +} + +message CreateGitBranchResponse { + // Git Branch + GitBranch result = 1; +} + +message FindGitBranchRequest { + // Project Id + string project_id = 492492543; + // Branch Name + string branch_name = 496000636; +} + +message FindGitBranchResponse { + // Git Branch + GitBranch result = 1; +} + +message DeleteGitBranchRequest { + // Project Id + string project_id = 492492543; + // Branch Name + string branch_name = 496000636; +} + +message DeleteGitBranchResponse { + // Successfully deleted. 
+ string result = 1; +} + +message DeployRefToProductionRequest { + // Id of project + string project_id = 492492543; + // Branch to deploy to production + string branch = 345257623; + // Ref to deploy to production + string ref = 112787; +} + +message DeployRefToProductionResponse { + +} + +message DeployToProductionRequest { + // Id of project + string project_id = 492492543; +} + +message DeployToProductionResponse { + +} + +message ResetProjectToProductionRequest { + // Id of project + string project_id = 492492543; +} + +message ResetProjectToProductionResponse { + +} + +message ResetProjectToRemoteRequest { + // Id of project + string project_id = 492492543; +} + +message ResetProjectToRemoteResponse { + +} + +message AllProjectsRequest { + // Requested fields + string fields = 318677073; +} + +message AllProjectsResponse { + // Project + repeated Project result = 1; +} + +message AllProjectsStreamResponse { + // Project + Project result = 1; +} + +message CreateProjectRequest { + Project body = 3029410; +} + +message CreateProjectResponse { + // Project + Project result = 1; +} + +message ProjectRequest { + // Project Id + string project_id = 492492543; + // Requested fields + string fields = 318677073; +} + +message ProjectResponse { + // Project + Project result = 1; +} + +message UpdateProjectRequest { + // Project Id + string project_id = 492492543; + Project body = 3029410; + // Requested fields + string fields = 318677073; +} + +message UpdateProjectResponse { + // Project + Project result = 1; +} + +message ManifestRequest { + // Project Id + string project_id = 492492543; +} + +message ManifestResponse { + // Manifest + Manifest result = 1; +} + +message GitDeployKeyRequest { + // Project Id + string project_id = 492492543; +} + +message GitDeployKeyResponse { + // The text of the public key portion of the deploy_key + string result = 1; +} + +message CreateGitDeployKeyRequest { + // Project Id + string project_id = 492492543; +} + +message CreateGitDeployKeyResponse { + // Project + string result = 1; +} + +message ProjectValidationResultsRequest { + // Project Id + string project_id = 492492543; + // Requested fields + string fields = 318677073; +} + +message ProjectValidationResultsResponse { + +} + +message ValidateProjectRequest { + // Project Id + string project_id = 492492543; + // Requested fields + string fields = 318677073; +} + +message ValidateProjectResponse { + // Project validation results + ProjectValidation result = 1; +} + +message ProjectWorkspaceRequest { + // Project Id + string project_id = 492492543; + // Requested fields + string fields = 318677073; +} + +message ProjectWorkspaceResponse { + // Project Workspace + ProjectWorkspace result = 1; +} + +message AllProjectFilesRequest { + // Project Id + string project_id = 492492543; + // Requested fields + string fields = 318677073; +} + +message AllProjectFilesResponse { + // Project File + repeated ProjectFile result = 1; +} + +message AllProjectFilesStreamResponse { + // Project File + ProjectFile result = 1; +} + +message ProjectFileRequest { + // Project Id + string project_id = 492492543; + // File Id + string file_id = 427500193; + // Requested fields + string fields = 318677073; +} + +message ProjectFileResponse { + // Project File + ProjectFile result = 1; +} + +message AllGitConnectionTestsRequest { + // Project Id + string project_id = 492492543; + // (Optional: leave blank for root project) The remote url for remote dependency to test. 
+ string remote_url = 520826107; +} + +message AllGitConnectionTestsResponse { + // Git Connection Test + repeated GitConnectionTest result = 1; +} + +message AllGitConnectionTestsStreamResponse { + // Git Connection Test + GitConnectionTest result = 1; +} + +message RunGitConnectionTestRequest { + // Project Id + string project_id = 492492543; + // Test Id + string test_id = 355613958; + // (Optional: leave blank for root project) The remote url for remote dependency to test. + string remote_url = 520826107; +} + +message RunGitConnectionTestResponse { + // Git Connection Test Result + GitConnectionTestResult result = 1; +} + +message AllLookmlTestsRequest { + // Project Id + string project_id = 492492543; + // File Id + string file_id = 427500193; +} + +message AllLookmlTestsResponse { + // LookML Test + repeated LookmlTest result = 1; +} + +message AllLookmlTestsStreamResponse { + // LookML Test + LookmlTest result = 1; +} + +message RunLookmlTestRequest { + // Project Id + string project_id = 492492543; + // File Name + string file_id = 427500193; + // Test Name + string test = 3556498; + // Model Name + string model = 104069929; +} + +message RunLookmlTestResponse { + // LookML Test Results + repeated LookmlTestResult result = 1; +} + +message RunLookmlTestStreamResponse { + // LookML Test Results + LookmlTestResult result = 1; +} + +message UpdateRepositoryCredentialRequest { + // Root Project Id + string root_project_id = 379036192; + // Credential Id + string credential_id = 371260031; + RepositoryCredential body = 3029410; +} + +message UpdateRepositoryCredentialResponse { + // Repository Credential + RepositoryCredential result = 1; +} + +message DeleteRepositoryCredentialRequest { + // Root Project Id + string root_project_id = 379036192; + // Credential Id + string credential_id = 371260031; +} + +message DeleteRepositoryCredentialResponse { + // Successfully deleted. + string result = 1; +} + +message GetAllRepositoryCredentialsRequest { + // Root Project Id + string root_project_id = 379036192; +} + +message GetAllRepositoryCredentialsResponse { + // Repository Credential + repeated RepositoryCredential result = 1; +} + +message GetAllRepositoryCredentialsStreamResponse { + // Repository Credential + RepositoryCredential result = 1; +} + +message CreateQueryTaskRequest { + CreateQueryTask body = 3029410; + // Row limit (may override the limit in the saved query). + int64 limit = 102976443; + // Apply model-specified formatting to each result. + bool apply_formatting = 400669803; + // Apply visualization options to results. + bool apply_vis = 518830860; + // Get results from cache if available. + bool cache = 94416770; + // Render width for image formats. + int64 image_width = 443391367; + // Render height for image formats. + int64 image_height = 421050507; + // Generate drill links (only applicable to 'json_detail' format. + bool generate_drill_links = 88455413; + // Force use of production models even if the user is in development mode. + bool force_production = 378049187; + // Retrieve any results from cache even if the results have expired. + bool cache_only = 28936777; + // Prefix to use for drill links (url encoded). + string path_prefix = 525914619; + // Rebuild PDTS used in query. 
+ bool rebuild_pdts = 256532759; + // Perform table calculations on query results + bool server_table_calcs = 334030724; + // Requested fields + string fields = 318677073; +} + +message CreateQueryTaskResponse { + // query_task + QueryTask result = 1; +} + +message QueryTaskMultiResultsRequest { + // List of Query Task IDs + string query_task_ids = 467208869; +} + +message QueryTaskMultiResultsResponse { + // Multiple query results + map result = 1; +} + +message QueryTaskRequest { + // ID of the Query Task + string query_task_id = 435207576; + // Requested fields. + string fields = 318677073; +} + +message QueryTaskResponse { + // query_task + QueryTask result = 1; +} + +message QueryTaskResultsRequest { + // ID of the Query Task + string query_task_id = 435207576; +} + +message QueryTaskResultsResponse { + +} + +message QueryRequest { + // Id of query + int64 query_id = 291663619; + // Requested fields. + string fields = 318677073; +} + +message QueryResponse { + // Query + Query result = 1; +} + +message QueryForSlugRequest { + // Slug of query + string slug = 3533483; + // Requested fields. + string fields = 318677073; +} + +message QueryForSlugResponse { + // Query + Query result = 1; +} + +message CreateQueryRequest { + Query body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message CreateQueryResponse { + // Query + Query result = 1; +} + +message RunQueryRequest { + // Id of query + int64 query_id = 291663619; + // Format of result + string result_format = 283324265; + // Row limit (may override the limit in the saved query). + int64 limit = 102976443; + // Apply model-specified formatting to each result. + bool apply_formatting = 400669803; + // Apply visualization options to results. + bool apply_vis = 518830860; + // Get results from cache if available. + bool cache = 94416770; + // Render width for image formats. + int64 image_width = 443391367; + // Render height for image formats. + int64 image_height = 421050507; + // Generate drill links (only applicable to 'json_detail' format. + bool generate_drill_links = 88455413; + // Force use of production models even if the user is in development mode. + bool force_production = 378049187; + // Retrieve any results from cache even if the results have expired. + bool cache_only = 28936777; + // Prefix to use for drill links (url encoded). + string path_prefix = 525914619; + // Rebuild PDTS used in query. + bool rebuild_pdts = 256532759; + // Perform table calculations on query results + bool server_table_calcs = 334030724; +} + +message RunQueryResponse { + +} + +message RunInlineQueryRequest { + // Format of result + string result_format = 283324265; + Query body = 3029410; + // Row limit (may override the limit in the saved query). + int64 limit = 102976443; + // Apply model-specified formatting to each result. + bool apply_formatting = 400669803; + // Apply visualization options to results. + bool apply_vis = 518830860; + // Get results from cache if available. + bool cache = 94416770; + // Render width for image formats. + int64 image_width = 443391367; + // Render height for image formats. + int64 image_height = 421050507; + // Generate drill links (only applicable to 'json_detail' format. + bool generate_drill_links = 88455413; + // Force use of production models even if the user is in development mode. + bool force_production = 378049187; + // Retrieve any results from cache even if the results have expired. + bool cache_only = 28936777; + // Prefix to use for drill links (url encoded). 
+ string path_prefix = 525914619; + // Rebuild PDTS used in query. + bool rebuild_pdts = 256532759; + // Perform table calculations on query results + bool server_table_calcs = 334030724; +} + +message RunInlineQueryResponse { + +} + +message RunUrlEncodedQueryRequest { + // Model name + string model_name = 526217848; + // View name + string view_name = 393300486; + // Format of result + string result_format = 283324265; +} + +message RunUrlEncodedQueryResponse { + +} + +message MergeQueryRequest { + // Merge Query Id + string merge_query_id = 289061881; + // Requested fields + string fields = 318677073; +} + +message MergeQueryResponse { + // Merge Query + MergeQuery result = 1; +} + +message CreateMergeQueryRequest { + MergeQuery body = 3029410; + // Requested fields + string fields = 318677073; +} + +message CreateMergeQueryResponse { + // Merge Query + MergeQuery result = 1; +} + +message AllRunningQueriesRequest { + +} + +message AllRunningQueriesResponse { + // Running Queries. + repeated RunningQueries result = 1; +} + +message AllRunningQueriesStreamResponse { + // Running Queries. + RunningQueries result = 1; +} + +message KillQueryRequest { + // Query task id. + string query_task_id = 435207576; +} + +message KillQueryResponse { + // Query successfully killed. + string result = 1; +} + +message SqlQueryRequest { + // slug of query + string slug = 3533483; +} + +message SqlQueryResponse { + // SQL Runner Query + SqlQuery result = 1; +} + +message CreateSqlQueryRequest { + SqlQueryCreate body = 3029410; +} + +message CreateSqlQueryResponse { + // SQL Runner Query + SqlQuery result = 1; +} + +message RunSqlQueryRequest { + // slug of query + string slug = 3533483; + // Format of result, options are: ["inline_json", "json", "json_detail", "json_fe", "csv", "html", "md", "txt", "xlsx", "gsxml", "json_label"] + string result_format = 283324265; + // Defaults to false. If set to true, the HTTP response will have content-disposition and other headers set to make the HTTP response behave as a downloadable attachment instead of as inline content. + string download = 356954658; +} + +message RunSqlQueryResponse { + +} + +message CreateLookRenderTaskRequest { + // Id of look to render + int64 look_id = 349778619; + // Output type: png, or jpg + string result_format = 283324265; + // Output width in pixels + int64 width = 113126854; + // Output height in pixels + int64 height = 305257398; + // Requested fields. + string fields = 318677073; +} + +message CreateLookRenderTaskResponse { + // Render Task + RenderTask result = 1; +} + +message CreateQueryRenderTaskRequest { + // Id of the query to render + int64 query_id = 291663619; + // Output type: png or jpg + string result_format = 283324265; + // Output width in pixels + int64 width = 113126854; + // Output height in pixels + int64 height = 305257398; + // Requested fields. + string fields = 318677073; +} + +message CreateQueryRenderTaskResponse { + // Render Task + RenderTask result = 1; +} + +message CreateDashboardRenderTaskRequest { + // Id of dashboard to render. The ID can be a LookML dashboard also. + string dashboard_id = 496187565; + // Output type: pdf, png, or jpg + string result_format = 283324265; + CreateDashboardRenderTask body = 3029410; + // Output width in pixels + int64 width = 113126854; + // Output height in pixels + int64 height = 305257398; + // Requested fields. + string fields = 318677073; + // Paper size for pdf. 
Value can be one of: ["letter","legal","tabloid","a0","a1","a2","a3","a4","a5"] + string pdf_paper_size = 491759120; + // Whether to render pdf in landscape paper orientation + bool pdf_landscape = 359176724; + // Whether or not to expand table vis to full length + bool long_tables = 359736222; +} + +message CreateDashboardRenderTaskResponse { + // Render Task + RenderTask result = 1; +} + +message RenderTaskRequest { + // Id of render task + string render_task_id = 274685669; + // Requested fields. + string fields = 318677073; +} + +message RenderTaskResponse { + // Render Task + RenderTask result = 1; +} + +message RenderTaskResultsRequest { + // Id of render task + string render_task_id = 274685669; +} + +message RenderTaskResultsResponse { + +} + +message SearchModelSetsRequest { + // Requested fields. + string fields = 318677073; + // Number of results to return (used with `offset`). + int64 limit = 102976443; + // Number of results to skip before returning any (used with `limit`). + int64 offset = 509889974; + // Fields to sort by. + string sorts = 109624981; + // Match model set id. + int64 id = 3355; + // Match model set name. + string name = 3373707; + // Match model sets by all_access status. + bool all_access = 505719009; + // Match model sets by built_in status. + bool built_in = 357544798; + // Combine given search criteria in a boolean OR expression. + bool filter_or = 440686075; +} + +message SearchModelSetsResponse { + // Model Set + repeated ModelSet result = 1; +} + +message SearchModelSetsStreamResponse { + // Model Set + ModelSet result = 1; +} + +message ModelSetRequest { + // Id of model set + int64 model_set_id = 1141778; + // Requested fields. + string fields = 318677073; +} + +message ModelSetResponse { + // Specified model set. + ModelSet result = 1; +} + +message UpdateModelSetRequest { + // id of model set + int64 model_set_id = 1141778; + ModelSet body = 3029410; +} + +message UpdateModelSetResponse { + // New state for specified model set. + ModelSet result = 1; +} + +message DeleteModelSetRequest { + // id of model set + int64 model_set_id = 1141778; +} + +message DeleteModelSetResponse { + // Model set succssfully deleted. + string result = 1; +} + +message AllModelSetsRequest { + // Requested fields. + string fields = 318677073; +} + +message AllModelSetsResponse { + // All model sets. + repeated ModelSet result = 1; +} + +message AllModelSetsStreamResponse { + // All model sets. + ModelSet result = 1; +} + +message CreateModelSetRequest { + ModelSet body = 3029410; +} + +message CreateModelSetResponse { + // Newly created ModelSet + ModelSet result = 1; +} + +message AllPermissionsRequest { + +} + +message AllPermissionsResponse { + // Permission + repeated Permission result = 1; +} + +message AllPermissionsStreamResponse { + // Permission + Permission result = 1; +} + +message SearchPermissionSetsRequest { + // Requested fields. + string fields = 318677073; + // Number of results to return (used with `offset`). + int64 limit = 102976443; + // Number of results to skip before returning any (used with `limit`). + int64 offset = 509889974; + // Fields to sort by. + string sorts = 109624981; + // Match permission set id. + int64 id = 3355; + // Match permission set name. + string name = 3373707; + // Match permission sets by all_access status. + bool all_access = 505719009; + // Match permission sets by built_in status. + bool built_in = 357544798; + // Combine given search criteria in a boolean OR expression. 
+ bool filter_or = 440686075; +} + +message SearchPermissionSetsResponse { + // Permission Set + repeated PermissionSet result = 1; +} + +message SearchPermissionSetsStreamResponse { + // Permission Set + PermissionSet result = 1; +} + +message PermissionSetRequest { + // Id of permission set + int64 permission_set_id = 379867460; + // Requested fields. + string fields = 318677073; +} + +message PermissionSetResponse { + // Permission Set + PermissionSet result = 1; +} + +message UpdatePermissionSetRequest { + // id of permission set + int64 permission_set_id = 379867460; + PermissionSet body = 3029410; +} + +message UpdatePermissionSetResponse { + // Permission Set + PermissionSet result = 1; +} + +message DeletePermissionSetRequest { + // Id of permission set + int64 permission_set_id = 379867460; +} + +message DeletePermissionSetResponse { + // Successfully deleted. + string result = 1; +} + +message AllPermissionSetsRequest { + // Requested fields. + string fields = 318677073; +} + +message AllPermissionSetsResponse { + // Permission Set + repeated PermissionSet result = 1; +} + +message AllPermissionSetsStreamResponse { + // Permission Set + PermissionSet result = 1; +} + +message CreatePermissionSetRequest { + PermissionSet body = 3029410; +} + +message CreatePermissionSetResponse { + // Permission Set + PermissionSet result = 1; +} + +message AllRolesRequest { + // Requested fields. + string fields = 318677073; + // Optional list of ids to get specific roles. + string ids = 104120; +} + +message AllRolesResponse { + // Role + repeated Role result = 1; +} + +message AllRolesStreamResponse { + // Role + Role result = 1; +} + +message CreateRoleRequest { + Role body = 3029410; +} + +message CreateRoleResponse { + // Role + Role result = 1; +} + +message SearchRolesRequest { + // Requested fields. + string fields = 318677073; + // Number of results to return (used with `offset`). + int64 limit = 102976443; + // Number of results to skip before returning any (used with `limit`). + int64 offset = 509889974; + // Fields to sort by. + string sorts = 109624981; + // Match role id. + int64 id = 3355; + // Match role name. + string name = 3373707; + // Match roles by built_in status. + bool built_in = 357544798; + // Combine given search criteria in a boolean OR expression. + bool filter_or = 440686075; +} + +message SearchRolesResponse { + // Role + repeated Role result = 1; +} + +message SearchRolesStreamResponse { + // Role + Role result = 1; +} + +message RoleRequest { + // id of role + int64 role_id = 344221025; +} + +message RoleResponse { + // Role + Role result = 1; +} + +message UpdateRoleRequest { + // id of role + int64 role_id = 344221025; + Role body = 3029410; +} + +message UpdateRoleResponse { + // Role + Role result = 1; +} + +message DeleteRoleRequest { + // id of role + int64 role_id = 344221025; +} + +message DeleteRoleResponse { + // Successfully deleted. + string result = 1; +} + +message RoleGroupsRequest { + // id of role + int64 role_id = 344221025; + // Requested fields. + string fields = 318677073; +} + +message RoleGroupsResponse { + // Groups with role. + repeated Group result = 1; +} + +message RoleGroupsStreamResponse { + // Groups with role. + Group result = 1; +} + +message SetRoleGroupsRequest { + // Id of Role + int64 role_id = 344221025; + repeated int64 body = 3029410; +} + +message SetRoleGroupsResponse { + // Groups with role. + repeated Group result = 1; +} + +message SetRoleGroupsStreamResponse { + // Groups with role. 
+ Group result = 1; +} + +message RoleUsersRequest { + // id of user + int64 role_id = 344221025; + // Requested fields. + string fields = 318677073; + // Get only users associated directly with the role: exclude those only associated through groups. + bool direct_association_only = 533341840; +} + +message RoleUsersResponse { + // Users with role. + repeated User result = 1; +} + +message RoleUsersStreamResponse { + // Users with role. + User result = 1; +} + +message SetRoleUsersRequest { + // id of role + int64 role_id = 344221025; + repeated int64 body = 3029410; +} + +message SetRoleUsersResponse { + // Users with role. + repeated User result = 1; +} + +message SetRoleUsersStreamResponse { + // Users with role. + User result = 1; +} + +message ScheduledPlansForSpaceRequest { + // Space Id + int64 space_id = 511862461; + // Requested fields. + string fields = 318677073; +} + +message ScheduledPlansForSpaceResponse { + // Scheduled Plan + repeated ScheduledPlan result = 1; +} + +message ScheduledPlansForSpaceStreamResponse { + // Scheduled Plan + ScheduledPlan result = 1; +} + +message ScheduledPlanRequest { + // Scheduled Plan Id + int64 scheduled_plan_id = 335709463; + // Requested fields. + string fields = 318677073; +} + +message ScheduledPlanResponse { + // Scheduled Plan + ScheduledPlan result = 1; +} + +message UpdateScheduledPlanRequest { + // Scheduled Plan Id + int64 scheduled_plan_id = 335709463; + ScheduledPlan body = 3029410; +} + +message UpdateScheduledPlanResponse { + // Scheduled Plan + ScheduledPlan result = 1; +} + +message DeleteScheduledPlanRequest { + // Scheduled Plan Id + int64 scheduled_plan_id = 335709463; +} + +message DeleteScheduledPlanResponse { + // Successfully deleted. + string result = 1; +} + +message AllScheduledPlansRequest { + // Return scheduled plans belonging to this user_id. If not provided, returns scheduled plans owned by the caller. + int64 user_id = 147132913; + // Comma delimited list of field names. If provided, only the fields specified will be included in the response + string fields = 318677073; + // Return scheduled plans belonging to all users (caller needs see_schedules permission) + bool all_users = 86970902; +} + +message AllScheduledPlansResponse { + // Scheduled Plan + repeated ScheduledPlan result = 1; +} + +message AllScheduledPlansStreamResponse { + // Scheduled Plan + ScheduledPlan result = 1; +} + +message CreateScheduledPlanRequest { + ScheduledPlan body = 3029410; +} + +message CreateScheduledPlanResponse { + // Scheduled Plan + ScheduledPlan result = 1; +} + +message ScheduledPlanRunOnceRequest { + ScheduledPlan body = 3029410; +} + +message ScheduledPlanRunOnceResponse { + // Scheduled Plan + ScheduledPlan result = 1; +} + +message ScheduledPlansForLookRequest { + // Look Id + int64 look_id = 349778619; + // User Id (default is requesting user if not specified) + int64 user_id = 147132913; + // Requested fields. 
+ string fields = 318677073; + // Return scheduled plans belonging to all users for the look + bool all_users = 86970902; +} + +message ScheduledPlansForLookResponse { + // Scheduled Plan + repeated ScheduledPlan result = 1; +} + +message ScheduledPlansForLookStreamResponse { + // Scheduled Plan + ScheduledPlan result = 1; +} + +message ScheduledPlansForDashboardRequest { + // Dashboard Id + int64 dashboard_id = 496187565; + // User Id (default is requesting user if not specified) + int64 user_id = 147132913; + // Return scheduled plans belonging to all users for the dashboard + bool all_users = 86970902; + // Requested fields. + string fields = 318677073; +} + +message ScheduledPlansForDashboardResponse { + // Scheduled Plan + repeated ScheduledPlan result = 1; +} + +message ScheduledPlansForDashboardStreamResponse { + // Scheduled Plan + ScheduledPlan result = 1; +} + +message ScheduledPlansForLookmlDashboardRequest { + // LookML Dashboard Id + string lookml_dashboard_id = 339184839; + // User Id (default is requesting user if not specified) + int64 user_id = 147132913; + // Requested fields. + string fields = 318677073; + // Return scheduled plans belonging to all users for the dashboard + bool all_users = 86970902; +} + +message ScheduledPlansForLookmlDashboardResponse { + // Scheduled Plan + repeated ScheduledPlan result = 1; +} + +message ScheduledPlansForLookmlDashboardStreamResponse { + // Scheduled Plan + ScheduledPlan result = 1; +} + +message ScheduledPlanRunOnceByIdRequest { + // Id of schedule plan to copy and run + int64 scheduled_plan_id = 335709463; + WriteScheduledPlan body = 3029410; +} + +message ScheduledPlanRunOnceByIdResponse { + // Scheduled Plan + ScheduledPlan result = 1; +} + +message SessionRequest { + +} + +message SessionResponse { + // Session + ApiSession result = 1; +} + +message UpdateSessionRequest { + ApiSession body = 3029410; +} + +message UpdateSessionResponse { + // Session + ApiSession result = 1; +} + +message AllThemesRequest { + // Requested fields. + string fields = 318677073; +} + +message AllThemesResponse { + // Themes + repeated Theme result = 1; +} + +message AllThemesStreamResponse { + // Themes + Theme result = 1; +} + +message CreateThemeRequest { + Theme body = 3029410; +} + +message CreateThemeResponse { + // Theme + Theme result = 1; +} + +message SearchThemesRequest { + // Match theme id. + int64 id = 3355; + // Match theme name. + string name = 3373707; + // Timestamp for activation. + google.protobuf.Timestamp begin_at = 372355810; + // Timestamp for expiration. + google.protobuf.Timestamp end_at = 324690554; + // Number of results to return (used with `offset`). + int64 limit = 102976443; + // Number of results to skip before returning any (used with `limit`). + int64 offset = 509889974; + // Fields to sort by. + string sorts = 109624981; + // Requested fields. + string fields = 318677073; + // Combine given search criteria in a boolean OR expression + bool filter_or = 440686075; +} + +message SearchThemesResponse { + // Themes + repeated Theme result = 1; +} + +message SearchThemesStreamResponse { + // Themes + Theme result = 1; +} + +message DefaultThemeRequest { + // Timestamp representing the target datetime for the active period. 
Defaults to 'now' + google.protobuf.Timestamp ts = 3711; +} + +message DefaultThemeResponse { + // Theme + Theme result = 1; +} + +message SetDefaultThemeRequest { + // Name of theme to set as default + string name = 3373707; +} + +message SetDefaultThemeResponse { + // Theme + Theme result = 1; +} + +message ActiveThemesRequest { + // Name of theme + string name = 3373707; + // Timestamp representing the target datetime for the active period. Defaults to 'now' + google.protobuf.Timestamp ts = 3711; + // Requested fields. + string fields = 318677073; +} + +message ActiveThemesResponse { + // Themes + repeated Theme result = 1; +} + +message ActiveThemesStreamResponse { + // Themes + Theme result = 1; +} + +message ThemeOrDefaultRequest { + // Name of theme + string name = 3373707; + // Timestamp representing the target datetime for the active period. Defaults to 'now' + google.protobuf.Timestamp ts = 3711; +} + +message ThemeOrDefaultResponse { + // Theme + Theme result = 1; +} + +message ValidateThemeRequest { + Theme body = 3029410; +} + +message ValidateThemeResponse { + +} + +message ThemeRequest { + // Id of theme + int64 theme_id = 277237460; + // Requested fields. + string fields = 318677073; +} + +message ThemeResponse { + // Theme + Theme result = 1; +} + +message UpdateThemeRequest { + // Id of theme + int64 theme_id = 277237460; + Theme body = 3029410; +} + +message UpdateThemeResponse { + // Theme + Theme result = 1; +} + +message DeleteThemeRequest { + // Id of theme + string theme_id = 277237460; +} + +message DeleteThemeResponse { + // Successfully deleted. + string result = 1; +} + +message MeRequest { + // Requested fields. + string fields = 318677073; +} + +message MeResponse { + // Current user. + User result = 1; +} + +message AllUsersRequest { + // Requested fields. + string fields = 318677073; + // Requested page. + int64 page = 3433103; + // Results per page. + int64 per_page = 424711281; + // Fields to sort by. + string sorts = 109624981; + // Optional list of ids to get specific users. + string ids = 104120; +} + +message AllUsersResponse { + // All users. + repeated User result = 1; +} + +message AllUsersStreamResponse { + // All users. + User result = 1; +} + +message CreateUserRequest { + User body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message CreateUserResponse { + // Created User + User result = 1; +} + +message SearchUsersRequest { + // Include only these fields in the response + string fields = 318677073; + // Return only page N of paginated results + int64 page = 3433103; + // Return N rows of data per page + int64 per_page = 424711281; + // Fields to sort by. + string sorts = 109624981; + // Match User Id. + string id = 3355; + // Match First name. + string first_name = 160985414; + // Match Last name. + string last_name = 503280549; + // Search for user accounts associated with Looker employees + bool verified_looker_employee = 471763269; + // Search for the user with this email address + string email = 96619420; + // Search for disabled user accounts + bool is_disabled = 464089615; + // Combine given search criteria in a boolean OR expression + bool filter_or = 440686075; + // Search for users who have access to this content_metadata item + string content_metadata_id = 293222822; + // Search for users who are direct members of this group + string group_id = 506361563; +} + +message SearchUsersResponse { + // Matching users. + repeated User result = 1; +} + +message SearchUsersStreamResponse { + // Matching users. 
+ User result = 1; +} + +message SearchUsersNamesRequest { + // Pattern to match + string pattern = 395545144; + // Include only these fields in the response + string fields = 318677073; + // Return only page N of paginated results + int64 page = 3433103; + // Return N rows of data per page + int64 per_page = 424711281; + // Fields to sort by + string sorts = 109624981; + // Match User Id + int64 id = 3355; + // Match First name + string first_name = 160985414; + // Match Last name + string last_name = 503280549; + // Match Verified Looker employee + bool verified_looker_employee = 471763269; + // Match Email Address + string email = 96619420; + // Include or exclude disabled accounts in the results + bool is_disabled = 464089615; +} + +message SearchUsersNamesResponse { + // Matching users. + repeated User result = 1; +} + +message SearchUsersNamesStreamResponse { + // Matching users. + User result = 1; +} + +message UserRequest { + // Id of user + int64 user_id = 147132913; + // Requested fields. + string fields = 318677073; +} + +message UserResponse { + // Specified user. + User result = 1; +} + +message UpdateUserRequest { + // Id of user + int64 user_id = 147132913; + User body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message UpdateUserResponse { + // New state for specified user. + User result = 1; +} + +message DeleteUserRequest { + // Id of user + int64 user_id = 147132913; +} + +message DeleteUserResponse { + // User successfully deleted. + string result = 1; +} + +message UserForCredentialRequest { + // Type name of credential + string credential_type = 298516599; + // Id of credential + string credential_id = 371260031; + // Requested fields. + string fields = 318677073; +} + +message UserForCredentialResponse { + // Specified user. + User result = 1; +} + +message UserCredentialsEmailRequest { + // id of user + int64 user_id = 147132913; + // Requested fields. + string fields = 318677073; +} + +message UserCredentialsEmailResponse { + // Email/Password Credential + CredentialsEmail result = 1; +} + +message CreateUserCredentialsEmailRequest { + // id of user + int64 user_id = 147132913; + CredentialsEmail body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message CreateUserCredentialsEmailResponse { + // Email/Password Credential + CredentialsEmail result = 1; +} + +message UpdateUserCredentialsEmailRequest { + // id of user + int64 user_id = 147132913; + CredentialsEmail body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message UpdateUserCredentialsEmailResponse { + // Email/Password Credential + CredentialsEmail result = 1; +} + +message DeleteUserCredentialsEmailRequest { + // id of user + int64 user_id = 147132913; +} + +message DeleteUserCredentialsEmailResponse { + // Successfully deleted. + string result = 1; +} + +message UserCredentialsTotpRequest { + // id of user + int64 user_id = 147132913; + // Requested fields. + string fields = 318677073; +} + +message UserCredentialsTotpResponse { + // Two-Factor Credential + CredentialsTotp result = 1; +} + +message CreateUserCredentialsTotpRequest { + // id of user + int64 user_id = 147132913; + CredentialsTotp body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message CreateUserCredentialsTotpResponse { + // Two-Factor Credential + CredentialsTotp result = 1; +} + +message DeleteUserCredentialsTotpRequest { + // id of user + int64 user_id = 147132913; +} + +message DeleteUserCredentialsTotpResponse { + // Successfully deleted. 
+ string result = 1; +} + +message UserCredentialsLdapRequest { + // id of user + int64 user_id = 147132913; + // Requested fields. + string fields = 318677073; +} + +message UserCredentialsLdapResponse { + // LDAP Credential + CredentialsLDAP result = 1; +} + +message DeleteUserCredentialsLdapRequest { + // id of user + int64 user_id = 147132913; +} + +message DeleteUserCredentialsLdapResponse { + // Successfully deleted. + string result = 1; +} + +message UserCredentialsGoogleRequest { + // id of user + int64 user_id = 147132913; + // Requested fields. + string fields = 318677073; +} + +message UserCredentialsGoogleResponse { + // Google Auth Credential + CredentialsGoogle result = 1; +} + +message DeleteUserCredentialsGoogleRequest { + // id of user + int64 user_id = 147132913; +} + +message DeleteUserCredentialsGoogleResponse { + // Successfully deleted. + string result = 1; +} + +message UserCredentialsSamlRequest { + // id of user + int64 user_id = 147132913; + // Requested fields. + string fields = 318677073; +} + +message UserCredentialsSamlResponse { + // Saml Auth Credential + CredentialsSaml result = 1; +} + +message DeleteUserCredentialsSamlRequest { + // id of user + int64 user_id = 147132913; +} + +message DeleteUserCredentialsSamlResponse { + // Successfully deleted. + string result = 1; +} + +message UserCredentialsOidcRequest { + // id of user + int64 user_id = 147132913; + // Requested fields. + string fields = 318677073; +} + +message UserCredentialsOidcResponse { + // OIDC Auth Credential + CredentialsOIDC result = 1; +} + +message DeleteUserCredentialsOidcRequest { + // id of user + int64 user_id = 147132913; +} + +message DeleteUserCredentialsOidcResponse { + // Successfully deleted. + string result = 1; +} + +message UserCredentialsApi3Request { + // Id of user + int64 user_id = 147132913; + // Id of API 3 Credential + int64 credentials_api3_id = 395298239; + // Requested fields. + string fields = 318677073; +} + +message UserCredentialsApi3Response { + // API 3 Credential + CredentialsApi3 result = 1; +} + +message DeleteUserCredentialsApi3Request { + // id of user + int64 user_id = 147132913; + // id of API 3 Credential + int64 credentials_api3_id = 395298239; +} + +message DeleteUserCredentialsApi3Response { + // Successfully deleted. + string result = 1; +} + +message AllUserCredentialsApi3sRequest { + // id of user + int64 user_id = 147132913; + // Requested fields. + string fields = 318677073; +} + +message AllUserCredentialsApi3sResponse { + // API 3 Credential + repeated CredentialsApi3 result = 1; +} + +message AllUserCredentialsApi3sStreamResponse { + // API 3 Credential + CredentialsApi3 result = 1; +} + +message CreateUserCredentialsApi3Request { + // id of user + int64 user_id = 147132913; + CredentialsApi3 body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message CreateUserCredentialsApi3Response { + // API 3 Credential + CredentialsApi3 result = 1; +} + +message UserCredentialsEmbedRequest { + // Id of user + int64 user_id = 147132913; + // Id of Embedding Credential + int64 credentials_embed_id = 350409591; + // Requested fields. + string fields = 318677073; +} + +message UserCredentialsEmbedResponse { + // Embedding Credential + CredentialsEmbed result = 1; +} + +message DeleteUserCredentialsEmbedRequest { + // id of user + int64 user_id = 147132913; + // id of Embedding Credential + int64 credentials_embed_id = 350409591; +} + +message DeleteUserCredentialsEmbedResponse { + // Successfully deleted. 
+ string result = 1; +} + +message AllUserCredentialsEmbedsRequest { + // id of user + int64 user_id = 147132913; + // Requested fields. + string fields = 318677073; +} + +message AllUserCredentialsEmbedsResponse { + // Embedding Credential + repeated CredentialsEmbed result = 1; +} + +message AllUserCredentialsEmbedsStreamResponse { + // Embedding Credential + CredentialsEmbed result = 1; +} + +message UserCredentialsLookerOpenidRequest { + // id of user + int64 user_id = 147132913; + // Requested fields. + string fields = 318677073; +} + +message UserCredentialsLookerOpenidResponse { + // Looker OpenId Credential + CredentialsLookerOpenid result = 1; +} + +message DeleteUserCredentialsLookerOpenidRequest { + // id of user + int64 user_id = 147132913; +} + +message DeleteUserCredentialsLookerOpenidResponse { + // Successfully deleted. + string result = 1; +} + +message UserSessionRequest { + // Id of user + int64 user_id = 147132913; + // Id of Web Login Session + int64 session_id = 415463385; + // Requested fields. + string fields = 318677073; +} + +message UserSessionResponse { + // Web Login Session + Session result = 1; +} + +message DeleteUserSessionRequest { + // id of user + int64 user_id = 147132913; + // id of Web Login Session + int64 session_id = 415463385; +} + +message DeleteUserSessionResponse { + // Successfully deleted. + string result = 1; +} + +message AllUserSessionsRequest { + // id of user + int64 user_id = 147132913; + // Requested fields. + string fields = 318677073; +} + +message AllUserSessionsResponse { + // Web Login Session + repeated Session result = 1; +} + +message AllUserSessionsStreamResponse { + // Web Login Session + Session result = 1; +} + +message CreateUserCredentialsEmailPasswordResetRequest { + // Id of user + int64 user_id = 147132913; + // Expiring token. + bool expires = 327308851; + // Requested fields. + string fields = 318677073; +} + +message CreateUserCredentialsEmailPasswordResetResponse { + // email/password credential + CredentialsEmail result = 1; +} + +message UserRolesRequest { + // id of user + int64 user_id = 147132913; + // Requested fields. + string fields = 318677073; + // Get only roles associated directly with the user: exclude those only associated through groups. + bool direct_association_only = 533341840; +} + +message UserRolesResponse { + // Roles of user. + repeated Role result = 1; +} + +message UserRolesStreamResponse { + // Roles of user. + Role result = 1; +} + +message SetUserRolesRequest { + // id of user + int64 user_id = 147132913; + repeated int64 body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message SetUserRolesResponse { + // Roles of user. + repeated Role result = 1; +} + +message SetUserRolesStreamResponse { + // Roles of user. + Role result = 1; +} + +message UserAttributeUserValuesRequest { + // Id of user + int64 user_id = 147132913; + // Requested fields. + string fields = 318677073; + // Specific user attributes to request. Omit or leave blank to request all user attributes. + string user_attribute_ids = 468967351; + // If true, returns all values in the search path instead of just the first value found. Useful for debugging group precedence. + bool all_values = 402771552; + // If true, returns an empty record for each requested attribute that has no user, group, or default value. + bool include_unset = 384531147; +} + +message UserAttributeUserValuesResponse { + // Value of user attribute. 
+ repeated UserAttributeWithValue result = 1; +} + +message UserAttributeUserValuesStreamResponse { + // Value of user attribute. + UserAttributeWithValue result = 1; +} + +message SetUserAttributeUserValueRequest { + // Id of user + int64 user_id = 147132913; + // Id of user attribute + int64 user_attribute_id = 534680475; + UserAttributeWithValue body = 3029410; +} + +message SetUserAttributeUserValueResponse { + // User attribute value. + UserAttributeWithValue result = 1; +} + +message DeleteUserAttributeUserValueRequest { + // Id of user + int64 user_id = 147132913; + // Id of user attribute + int64 user_attribute_id = 534680475; +} + +message DeleteUserAttributeUserValueResponse { + // Deleted +} + +message SendUserCredentialsEmailPasswordResetRequest { + // Id of user + int64 user_id = 147132913; + // Requested fields. + string fields = 318677073; +} + +message SendUserCredentialsEmailPasswordResetResponse { + // email/password credential + CredentialsEmail result = 1; +} + +message AllUserAttributesRequest { + // Requested fields. + string fields = 318677073; + // Fields to order the results by. Sortable fields include: name, label + string sorts = 109624981; +} + +message AllUserAttributesResponse { + // User Attribute + repeated UserAttribute result = 1; +} + +message AllUserAttributesStreamResponse { + // User Attribute + UserAttribute result = 1; +} + +message CreateUserAttributeRequest { + UserAttribute body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message CreateUserAttributeResponse { + // User Attribute + UserAttribute result = 1; +} + +message UserAttributeRequest { + // Id of user attribute + int64 user_attribute_id = 534680475; + // Requested fields. + string fields = 318677073; +} + +message UserAttributeResponse { + // User Attribute + UserAttribute result = 1; +} + +message UpdateUserAttributeRequest { + // Id of user attribute + int64 user_attribute_id = 534680475; + UserAttribute body = 3029410; + // Requested fields. + string fields = 318677073; +} + +message UpdateUserAttributeResponse { + // User Attribute + UserAttribute result = 1; +} + +message DeleteUserAttributeRequest { + // Id of user_attribute + int64 user_attribute_id = 534680475; +} + +message DeleteUserAttributeResponse { + // Successfully deleted. + string result = 1; +} + +message AllUserAttributeGroupValuesRequest { + // Id of user attribute + int64 user_attribute_id = 534680475; + // Requested fields. + string fields = 318677073; +} + +message AllUserAttributeGroupValuesResponse { + // All group values for attribute. + repeated UserAttributeGroupValue result = 1; +} + +message AllUserAttributeGroupValuesStreamResponse { + // All group values for attribute. + UserAttributeGroupValue result = 1; +} + +message SetUserAttributeGroupValuesRequest { + // Id of user attribute + int64 user_attribute_id = 534680475; + repeated UserAttributeGroupValue body = 3029410; +} + +message SetUserAttributeGroupValuesResponse { + // Array of group values. + repeated UserAttributeGroupValue result = 1; +} + +message SetUserAttributeGroupValuesStreamResponse { + // Array of group values. 
+ UserAttributeGroupValue result = 1; +} + +message AllWorkspacesRequest { + +} + +message AllWorkspacesResponse { + // Workspace + repeated Workspace result = 1; +} + +message AllWorkspacesStreamResponse { + // Workspace + Workspace result = 1; +} + +message WorkspaceRequest { + // Id of the workspace + string workspace_id = 394620993; +} + +message WorkspaceResponse { + // Workspace + Workspace result = 1; +} diff --git a/proto/grpc_proxy/src/main/proto/sdk/streams.proto b/proto/grpc_proxy/src/main/proto/sdk/streams.proto new file mode 100644 index 000000000..7f2cca89a --- /dev/null +++ b/proto/grpc_proxy/src/main/proto/sdk/streams.proto @@ -0,0 +1,3347 @@ +// MIT License +// +// Copyright (c) 2019 Looker Data Sciences, Inc. +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in all +// copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +// SOFTWARE. + +// 401 API methods + + +syntax = "proto3"; + +package looker; + +option java_package = "com.google.looker.grpc.services"; +option java_multiple_files = true; + +import 'sdk/models.proto'; + +service LookerStreamingService { + + // ApiAuth: API Authentication + + // ### Present client credentials to obtain an authorization token + // + // Looker API implements the OAuth2 [Resource Owner Password Credentials Grant](https://looker.com/docs/r/api/outh2_resource_owner_pc) pattern. + // The client credentials required for this login must be obtained by creating an API3 key on a user account + // in the Looker Admin console. The API3 key consists of a public `client_id` and a private `client_secret`. + // + // The access token returned by `login` must be used in the HTTP Authorization header of subsequent + // API requests, like this: + // ``` + // Authorization: token 4QDkCyCtZzYgj4C2p2cj3csJH7zqS5RzKs2kTnG4 + // ``` + // Replace "4QDkCy..." with the `access_token` value returned by `login`. + // The word `token` is a string literal and must be included exactly as shown. + // + // This function can accept `client_id` and `client_secret` parameters as URL query params or as www-form-urlencoded params in the body of the HTTP request. Since there is a small risk that URL parameters may be visible to intermediate nodes on the network route (proxies, routers, etc), passing credentials in the body of the request is considered more secure than URL params. 
+ // + // Example of passing credentials in the HTTP request body: + // ```` + // POST HTTP /login + // Content-Type: application/x-www-form-urlencoded + // + // client_id=CGc9B7v7J48dQSJvxxx&client_secret=nNVS9cSS3xNpSC9JdsBvvvvv + // ```` + // + // ### Best Practice: + // Always pass credentials in body params. Pass credentials in URL query params **only** when you cannot pass body params due to application, tool, or other limitations. + // + // For more information and detailed examples of Looker API authorization, see [How to Authenticate to Looker API3](https://github.com/looker/looker-sdk-ruby/blob/master/authentication.md). + // + rpc Login(LoginRequest) returns (stream LoginResponse); + + // ### Create an access token that runs as a given user. + // + // This can only be called by an authenticated admin user. It allows that admin to generate a new + // authentication token for the user with the given user id. That token can then be used for subsequent + // API calls - which are then performed *as* that target user. + // + // The target user does *not* need to have a pre-existing API client_id/client_secret pair. And, no such + // credentials are created by this call. + // + // This allows for building systems where api user authentication for an arbitrary number of users is done + // outside of Looker and funneled through a single 'service account' with admin permissions. Note that a + // new access token is generated on each call. If target users are going to be making numerous API + // calls in a short period then it is wise to cache this authentication token rather than call this before + // each of those API calls. + // + // See 'login' for more detail on the access token and how to use it. + // + rpc LoginUser(LoginUserRequest) returns (stream LoginUserResponse); + + // ### Logout of the API and invalidate the current access token. + // + rpc Logout(LogoutRequest) returns (stream LogoutResponse); + + + + // Auth: Manage User Authentication Configuration + + // ### Create SSO Embed URL + // + // Creates an SSO embed URL and cryptographically signs it with an embed secret. + // This signed URL can then be used to instantiate a Looker embed session in a PBL web application. + // Do not make any modifications to this URL - any change may invalidate the signature and + // cause the URL to fail to load a Looker embed session. + // + // A signed SSO embed URL can only be used once. After it has been used to request a page from the + // Looker server, the URL is invalid. Future requests using the same URL will fail. This is to prevent + // 'replay attacks'. + // + // The `target_url` property must be a complete URL of a Looker UI page - scheme, hostname, path and query params. + // To load a dashboard with id 56 and with a filter of `Date=1 years`, the looker URL would look like `https:/myname.looker.com/dashboards/56?Date=1%20years`. + // The best way to obtain this target_url is to navigate to the desired Looker page in your web browser, + // copy the URL shown in the browser address bar and paste it into the `target_url` property as a quoted string value in this API request. + // + // Permissions for the embed user are defined by the groups in which the embed user is a member (group_ids property) + // and the lists of models and permissions assigned to the embed user. + // At a minimum, you must provide values for either the group_ids property, or both the models and permissions properties. 
+ // These properties are additive; an embed user can be a member of certain groups AND be granted access to models and permissions.
+ //
+ // The embed user's access is the union of permissions granted by the group_ids, models, and permissions properties.
+ //
+ // This function does not strictly require all group_ids, user attribute names, or model names to exist at the moment the
+ // SSO embed url is created. Unknown group_id, user attribute names or model names will be passed through to the output URL.
+ // To diagnose potential problems with an SSO embed URL, you can copy the signed URL into the Embed URI Validator text box in `/admin/embed`.
+ //
+ // The `secret_id` parameter is optional. If specified, its value must be the id of an active secret defined in the Looker instance.
+ // If not specified, the URL will be signed using the newest active secret defined in the Looker instance.
+ //
+ // #### Security Note
+ // Protect this signed URL as you would an access token or password credentials - do not write
+ // it to disk, do not pass it to a third party, and only pass it through a secure HTTPS
+ // encrypted transport.
+ //
+ rpc CreateSsoEmbedUrl(CreateSsoEmbedUrlRequest) returns (stream CreateSsoEmbedUrlResponse);
+
+ // ### Create an Embed URL
+ //
+ // Creates an embed URL that runs as the Looker user making this API call. ("Embed as me")
+ // This embed URL can then be used to instantiate a Looker embed session in a
+ // "Powered by Looker" (PBL) web application.
+ //
+ // This is similar to Private Embedding (https://docs.looker.com/r/admin/embed/private-embed). Instead of
+ // logging into the Web UI to authenticate, the user has already authenticated against the API to be able to
+ // make this call. However, unlike Private Embed where the user has access to any other part of the Looker UI,
+ // the embed web session created by requesting the EmbedUrlResponse.url in a browser only has access to
+ // content visible under the `/embed` context.
+ //
+ // An embed URL can only be used once, and must be used within 5 minutes of being created. After it
+ // has been used to request a page from the Looker server, the URL is invalid. Future requests using
+ // the same URL will fail. This is to prevent 'replay attacks'.
+ //
+ // The `target_url` property must be a complete URL of a Looker Embedded UI page - scheme, hostname, path starting with "/embed" and query params.
+ // To load a dashboard with id 56 and with a filter of `Date=1 years`, the looker Embed URL would look like `https://myname.looker.com/embed/dashboards/56?Date=1%20years`.
+ // The best way to obtain this target_url is to navigate to the desired Looker page in your web browser,
+ // copy the URL shown in the browser address bar, insert "/embed" after the host/port, and paste it into the `target_url` property as a quoted string value in this API request.
+ //
+ // #### Security Note
+ // Protect this embed URL as you would an access token or password credentials - do not write
+ // it to disk, do not pass it to a third party, and only pass it through a secure HTTPS
+ // encrypted transport.
+ //
+ rpc CreateEmbedUrlAsMe(CreateEmbedUrlAsMeRequest) returns (stream CreateEmbedUrlAsMeResponse);
+
+ // ### Get the LDAP configuration.
+ //
+ // Looker can be optionally configured to authenticate users against an Active Directory or other LDAP directory server.
+ // LDAP setup requires coordination with an administrator of that directory server.
+ //
+ // Only Looker administrators can read and update the LDAP configuration.
+ //
+ // Configuring LDAP impacts authentication for all users. This configuration should be done carefully.
+ //
+ // Looker maintains a single LDAP configuration. It can be read and updated. Updates only succeed if the new state will be valid (in the sense that all required fields are populated); it is up to you to ensure that the configuration is appropriate and correct.
+ //
+ // LDAP is enabled or disabled for Looker using the **enabled** field.
+ //
+ // Looker will never return an **auth_password** field. That value can be set, but never retrieved.
+ //
+ // See the [Looker LDAP docs](https://www.looker.com/docs/r/api/ldap_setup) for additional information.
+ //
+ rpc LdapConfig(LdapConfigRequest) returns (stream LdapConfigResponse);
+
+ // ### Update the LDAP configuration.
+ //
+ // Configuring LDAP impacts authentication for all users. This configuration should be done carefully.
+ //
+ // Only Looker administrators can read and update the LDAP configuration.
+ //
+ // LDAP is enabled or disabled for Looker using the **enabled** field.
+ //
+ // It is **highly** recommended that any LDAP setting changes be tested using the APIs below before being set globally.
+ //
+ // See the [Looker LDAP docs](https://www.looker.com/docs/r/api/ldap_setup) for additional information.
+ //
+ rpc UpdateLdapConfig(UpdateLdapConfigRequest) returns (stream UpdateLdapConfigResponse);
+
+ // ### Test the connection settings for an LDAP configuration.
+ //
+ // This tests that the connection is possible given a connection_host and connection_port.
+ //
+ // **connection_host** and **connection_port** are required. **connection_tls** is optional.
+ //
+ // Example:
+ // ```json
+ // {
+ // "connection_host": "ldap.example.com",
+ // "connection_port": "636",
+ // "connection_tls": true
+ // }
+ // ```
+ //
+ // No authentication to the LDAP server is attempted.
+ //
+ // The active LDAP settings are not modified.
+ //
+ rpc TestLdapConfigConnection(TestLdapConfigConnectionRequest) returns (stream TestLdapConfigConnectionResponse);
+
+ // ### Test the connection authentication settings for an LDAP configuration.
+ //
+ // This tests that the connection is possible and that a 'server' account to be used by Looker can authenticate to the LDAP server given connection and authentication information.
+ //
+ // **connection_host**, **connection_port**, and **auth_username** are required. **connection_tls** and **auth_password** are optional.
+ //
+ // Example:
+ // ```json
+ // {
+ // "connection_host": "ldap.example.com",
+ // "connection_port": "636",
+ // "connection_tls": true,
+ // "auth_username": "cn=looker,dc=example,dc=com",
+ // "auth_password": "secret"
+ // }
+ // ```
+ //
+ // Looker will never return an **auth_password**. If this request omits the **auth_password** field, then the **auth_password** value from the active config (if present) will be used for the test.
+ //
+ // The active LDAP settings are not modified.
+ //
+ //
+ rpc TestLdapConfigAuth(TestLdapConfigAuthRequest) returns (stream TestLdapConfigAuthResponse);
+
+ // ### Test the user authentication settings for an LDAP configuration without authenticating the user.
+ //
+ // This test will let you easily test the mapping for user properties and roles for any user without needing to authenticate as that user.
+ //
+ // This test accepts a full LDAP configuration along with a username and attempts to find the full info for the user from the LDAP server without actually authenticating the user. So, user password is not required. The configuration is validated before attempting to contact the server.
+ //
+ // **test_ldap_user** is required.
+ //
+ // The active LDAP settings are not modified.
+ //
+ //
+ rpc TestLdapConfigUserInfo(TestLdapConfigUserInfoRequest) returns (stream TestLdapConfigUserInfoResponse);
+
+ // ### Test the user authentication settings for an LDAP configuration.
+ //
+ // This test accepts a full LDAP configuration along with a username/password pair and attempts to authenticate the user with the LDAP server. The configuration is validated before attempting the authentication.
+ //
+ // Looker will never return an **auth_password**. If this request omits the **auth_password** field, then the **auth_password** value from the active config (if present) will be used for the test.
+ //
+ // **test_ldap_user** and **test_ldap_password** are required.
+ //
+ // The active LDAP settings are not modified.
+ //
+ //
+ rpc TestLdapConfigUserAuth(TestLdapConfigUserAuthRequest) returns (stream TestLdapConfigUserAuthResponse);
+
+ // ### List All OAuth Client Apps
+ //
+ // Lists all applications registered to use OAuth2 login with this Looker instance, including
+ // enabled and disabled apps.
+ //
+ // Results are filtered to include only the apps that the caller (current user)
+ // has permission to see.
+ //
+ rpc AllOauthClientApps(AllOauthClientAppsRequest) returns (stream AllOauthClientAppsStreamResponse);
+
+ // ### Get Oauth Client App
+ //
+ // Returns the registered app client with matching client_guid.
+ //
+ rpc OauthClientApp(OauthClientAppRequest) returns (stream OauthClientAppResponse);
+
+ // ### Register an OAuth2 Client App
+ //
+ // Registers details identifying an external web app or native app as an OAuth2 login client of the Looker instance.
+ // The app registration must provide a unique client_guid and redirect_uri that the app will present
+ // in OAuth login requests. If the client_guid and redirect_uri parameters in the login request do not match
+ // the app details registered with the Looker instance, the request is assumed to be a forgery and is rejected.
+ //
+ rpc RegisterOauthClientApp(RegisterOauthClientAppRequest) returns (stream RegisterOauthClientAppResponse);
+
+ // ### Update OAuth2 Client App Details
+ //
+ // Modifies the details of a previously registered OAuth2 login client app.
+ //
+ rpc UpdateOauthClientApp(UpdateOauthClientAppRequest) returns (stream UpdateOauthClientAppResponse);
+
+ // ### Delete OAuth Client App
+ //
+ // Deletes the registration info of the app with the matching client_guid.
+ // All active sessions and tokens issued for this app will immediately become invalid.
+ //
+ // ### Note: this deletion cannot be undone.
+ //
+ rpc DeleteOauthClientApp(DeleteOauthClientAppRequest) returns (stream DeleteOauthClientAppResponse);
+
+ // ### Invalidate All Issued Tokens
+ //
+ // Immediately invalidates all auth codes, sessions, access tokens and refresh tokens issued for
+ // this app for ALL USERS of this app.
+ //
+ rpc InvalidateTokens(InvalidateTokensRequest) returns (stream InvalidateTokensResponse);
+
+ // ### Activate an app for a user
+ //
+ // Activates a user for a given oauth client app. 
This indicates the user has been informed that
+ // the app will have access to the user's looker data, and that the user has accepted and allowed
+ // the app to use their Looker account.
+ //
+ // Activating a user for an app that the user is already activated with returns a success response.
+ //
+ rpc ActivateAppUser(ActivateAppUserRequest) returns (stream ActivateAppUserResponse);
+
+ // ### Deactivate an app for a user
+ //
+ // Deactivate a user for a given oauth client app. All tokens issued to the app for
+ // this user will be invalid immediately. Before the user can use the app with their
+ // Looker account, the user will have to read and accept an account use disclosure statement for the app.
+ //
+ // Admin users can deactivate other users, but non-admin users can only deactivate themselves.
+ //
+ // As with most REST DELETE operations, this endpoint does not return an error if the indicated
+ // resource (app or user) does not exist or has already been deactivated.
+ //
+ rpc DeactivateAppUser(DeactivateAppUserRequest) returns (stream DeactivateAppUserResponse);
+
+ // ### Get the OIDC configuration.
+ //
+ // Looker can be optionally configured to authenticate users against an OpenID Connect (OIDC)
+ // authentication server. OIDC setup requires coordination with an administrator of that server.
+ //
+ // Only Looker administrators can read and update the OIDC configuration.
+ //
+ // Configuring OIDC impacts authentication for all users. This configuration should be done carefully.
+ //
+ // Looker maintains a single OIDC configuration. It can be read and updated. Updates only succeed if the new state will be valid (in the sense that all required fields are populated); it is up to you to ensure that the configuration is appropriate and correct.
+ //
+ // OIDC is enabled or disabled for Looker using the **enabled** field.
+ //
+ rpc OidcConfig(OidcConfigRequest) returns (stream OidcConfigResponse);
+
+ // ### Update the OIDC configuration.
+ //
+ // Configuring OIDC impacts authentication for all users. This configuration should be done carefully.
+ //
+ // Only Looker administrators can read and update the OIDC configuration.
+ //
+ // OIDC is enabled or disabled for Looker using the **enabled** field.
+ //
+ // It is **highly** recommended that any OIDC setting changes be tested using the APIs below before being set globally.
+ //
+ rpc UpdateOidcConfig(UpdateOidcConfigRequest) returns (stream UpdateOidcConfigResponse);
+
+ // ### Get an OIDC test configuration by test_slug.
+ //
+ rpc OidcTestConfig(OidcTestConfigRequest) returns (stream OidcTestConfigResponse);
+
+ // ### Delete an OIDC test configuration.
+ //
+ rpc DeleteOidcTestConfig(DeleteOidcTestConfigRequest) returns (stream DeleteOidcTestConfigResponse);
+
+ // ### Create an OIDC test configuration.
+ //
+ rpc CreateOidcTestConfig(CreateOidcTestConfigRequest) returns (stream CreateOidcTestConfigResponse);
+
+ // ### Get password config.
+ //
+ rpc PasswordConfig(PasswordConfigRequest) returns (stream PasswordConfigResponse);
+
+ // ### Update password config.
+ //
+ rpc UpdatePasswordConfig(UpdatePasswordConfigRequest) returns (stream UpdatePasswordConfigResponse);
+
+ // ### Force all credentials_email users to reset their login passwords upon their next login.
+ //
+ rpc ForcePasswordResetAtNextLoginForAllUsers(ForcePasswordResetAtNextLoginForAllUsersRequest) returns (stream ForcePasswordResetAtNextLoginForAllUsersResponse);
+
+ // ### Get the SAML configuration.
+ //
+ // Looker can be optionally configured to authenticate users against a SAML authentication server.
+ // SAML setup requires coordination with an administrator of that server.
+ //
+ // Only Looker administrators can read and update the SAML configuration.
+ //
+ // Configuring SAML impacts authentication for all users. This configuration should be done carefully.
+ //
+ // Looker maintains a single SAML configuration. It can be read and updated. Updates only succeed if the new state will be valid (in the sense that all required fields are populated); it is up to you to ensure that the configuration is appropriate and correct.
+ //
+ // SAML is enabled or disabled for Looker using the **enabled** field.
+ //
+ rpc SamlConfig(SamlConfigRequest) returns (stream SamlConfigResponse);
+
+ // ### Update the SAML configuration.
+ //
+ // Configuring SAML impacts authentication for all users. This configuration should be done carefully.
+ //
+ // Only Looker administrators can read and update the SAML configuration.
+ //
+ // SAML is enabled or disabled for Looker using the **enabled** field.
+ //
+ // It is **highly** recommended that any SAML setting changes be tested using the APIs below before being set globally.
+ //
+ rpc UpdateSamlConfig(UpdateSamlConfigRequest) returns (stream UpdateSamlConfigResponse);
+
+ // ### Get a SAML test configuration by test_slug.
+ //
+ rpc SamlTestConfig(SamlTestConfigRequest) returns (stream SamlTestConfigResponse);
+
+ // ### Delete a SAML test configuration.
+ //
+ rpc DeleteSamlTestConfig(DeleteSamlTestConfigRequest) returns (stream DeleteSamlTestConfigResponse);
+
+ // ### Create a SAML test configuration.
+ //
+ rpc CreateSamlTestConfig(CreateSamlTestConfigRequest) returns (stream CreateSamlTestConfigResponse);
+
+ // ### Parse the given xml as a SAML IdP metadata document and return the result.
+ //
+ rpc ParseSamlIdpMetadata(ParseSamlIdpMetadataRequest) returns (stream ParseSamlIdpMetadataResponse);
+
+ // ### Fetch the given url and parse it as a SAML IdP metadata document and return the result.
+ // Note that this requires that the url be public or at least at a location where the Looker instance
+ // can fetch it without requiring any special authentication.
+ //
+ rpc FetchAndParseSamlIdpMetadata(FetchAndParseSamlIdpMetadataRequest) returns (stream FetchAndParseSamlIdpMetadataResponse);
+
+ // ### Get session config.
+ //
+ rpc SessionConfig(SessionConfigRequest) returns (stream SessionConfigResponse);
+
+ // ### Update session config.
+ //
+ rpc UpdateSessionConfig(UpdateSessionConfigRequest) returns (stream UpdateSessionConfigResponse);
+
+ // ### Get currently locked-out users.
+ //
+ rpc AllUserLoginLockouts(AllUserLoginLockoutsRequest) returns (stream AllUserLoginLockoutsStreamResponse);
+
+ // ### Search currently locked-out users.
+ //
+ rpc SearchUserLoginLockouts(SearchUserLoginLockoutsRequest) returns (stream SearchUserLoginLockoutsStreamResponse);
+
+ // ### Removes login lockout for the associated user.
+ //
+ rpc DeleteUserLoginLockout(DeleteUserLoginLockoutRequest) returns (stream DeleteUserLoginLockoutResponse);
+
+
+
+ // Board: Manage Boards
+
+ // ### Get information about all boards.
+ //
+ rpc AllBoards(AllBoardsRequest) returns (stream AllBoardsStreamResponse);
+
+ // ### Create a new board.
+ // + rpc CreateBoard(CreateBoardRequest) returns (stream CreateBoardResponse); + + // ### Search Boards + // + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + rpc SearchBoards(SearchBoardsRequest) returns (stream SearchBoardsStreamResponse); + + // ### Get information about a board. + // + rpc Board(BoardRequest) returns (stream BoardResponse); + + // ### Update a board definition. + // + rpc UpdateBoard(UpdateBoardRequest) returns (stream UpdateBoardResponse); + + // ### Delete a board. + // + rpc DeleteBoard(DeleteBoardRequest) returns (stream DeleteBoardResponse); + + // ### Get information about all board items. + // + rpc AllBoardItems(AllBoardItemsRequest) returns (stream AllBoardItemsStreamResponse); + + // ### Create a new board item. + // + rpc CreateBoardItem(CreateBoardItemRequest) returns (stream CreateBoardItemResponse); + + // ### Get information about a board item. + // + rpc BoardItem(BoardItemRequest) returns (stream BoardItemResponse); + + // ### Update a board item definition. + // + rpc UpdateBoardItem(UpdateBoardItemRequest) returns (stream UpdateBoardItemResponse); + + // ### Delete a board item. + // + rpc DeleteBoardItem(DeleteBoardItemRequest) returns (stream DeleteBoardItemResponse); + + // ### Get information about all board sections. + // + rpc AllBoardSections(AllBoardSectionsRequest) returns (stream AllBoardSectionsStreamResponse); + + // ### Create a new board section. + // + rpc CreateBoardSection(CreateBoardSectionRequest) returns (stream CreateBoardSectionResponse); + + // ### Get information about a board section. + // + rpc BoardSection(BoardSectionRequest) returns (stream BoardSectionResponse); + + // ### Update a board section definition. + // + rpc UpdateBoardSection(UpdateBoardSectionRequest) returns (stream UpdateBoardSectionResponse); + + // ### Delete a board section. 
+ // + rpc DeleteBoardSection(DeleteBoardSectionRequest) returns (stream DeleteBoardSectionResponse); + + + + // ColorCollection: Manage Color Collections + + // ### Get an array of all existing Color Collections + // Get a **single** color collection by id with [ColorCollection](#!/ColorCollection/color_collection) + // + // Get all **standard** color collections with [ColorCollection](#!/ColorCollection/color_collections_standard) + // + // Get all **custom** color collections with [ColorCollection](#!/ColorCollection/color_collections_custom) + // + // **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + // + // + rpc AllColorCollections(AllColorCollectionsRequest) returns (stream AllColorCollectionsStreamResponse); + + // ### Create a custom color collection with the specified information + // + // Creates a new custom color collection object, returning the details, including the created id. + // + // **Update** an existing color collection with [Update Color Collection](#!/ColorCollection/update_color_collection) + // + // **Permanently delete** an existing custom color collection with [Delete Color Collection](#!/ColorCollection/delete_color_collection) + // + // **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + // + // + rpc CreateColorCollection(CreateColorCollectionRequest) returns (stream CreateColorCollectionResponse); + + // ### Get an array of all existing **Custom** Color Collections + // Get a **single** color collection by id with [ColorCollection](#!/ColorCollection/color_collection) + // + // Get all **standard** color collections with [ColorCollection](#!/ColorCollection/color_collections_standard) + // + // **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + // + // + rpc ColorCollectionsCustom(ColorCollectionsCustomRequest) returns (stream ColorCollectionsCustomStreamResponse); + + // ### Get an array of all existing **Standard** Color Collections + // Get a **single** color collection by id with [ColorCollection](#!/ColorCollection/color_collection) + // + // Get all **custom** color collections with [ColorCollection](#!/ColorCollection/color_collections_custom) + // + // **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + // + // + rpc ColorCollectionsStandard(ColorCollectionsStandardRequest) returns (stream ColorCollectionsStandardStreamResponse); + + // ### Get the default color collection + // + // Use this to retrieve the default Color Collection. + // + // Set the default color collection with [ColorCollection](#!/ColorCollection/set_default_color_collection) + // + rpc DefaultColorCollection(DefaultColorCollectionRequest) returns (stream DefaultColorCollectionResponse); + + // ### Set the global default Color Collection by ID + // + // Returns the new specified default Color Collection object. + // **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + // + // + rpc SetDefaultColorCollection(SetDefaultColorCollectionRequest) returns (stream SetDefaultColorCollectionResponse); + + // ### Get a Color Collection by ID + // + // Use this to retrieve a specific Color Collection. 
+ // Get a **single** color collection by id with [ColorCollection](#!/ColorCollection/color_collection) + // + // Get all **standard** color collections with [ColorCollection](#!/ColorCollection/color_collections_standard) + // + // Get all **custom** color collections with [ColorCollection](#!/ColorCollection/color_collections_custom) + // + // **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + // + // + rpc ColorCollection(ColorCollectionRequest) returns (stream ColorCollectionResponse); + + // ### Update a custom color collection by id. + // **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + // + // + rpc UpdateColorCollection(UpdateColorCollectionRequest) returns (stream UpdateColorCollectionResponse); + + // ### Delete a custom color collection by id + // + // This operation permanently deletes the identified **Custom** color collection. + // + // **Standard** color collections cannot be deleted + // + // Because multiple color collections can have the same label, they must be deleted by ID, not name. + // **Note**: Only an API user with the Admin role can call this endpoint. Unauthorized requests will return `Not Found` (404) errors. + // + // + rpc DeleteColorCollection(DeleteColorCollectionRequest) returns (stream DeleteColorCollectionResponse); + + + + // Command: Manage Commands + + // ### Get All Commands. + // + rpc GetAllCommands(GetAllCommandsRequest) returns (stream GetAllCommandsStreamResponse); + + // ### Create a new command. + // # Required fields: [:name, :linked_content_id, :linked_content_type] + // # `linked_content_type` must be one of ["dashboard", "lookml_dashboard"] + // # + // + rpc CreateCommand(CreateCommandRequest) returns (stream CreateCommandResponse); + + // ### Update an existing custom command. + // # Optional fields: ['name', 'description'] + // # + // + rpc UpdateCommand(UpdateCommandRequest) returns (stream UpdateCommandResponse); + + // ### Delete an existing custom command. + // + rpc DeleteCommand(DeleteCommandRequest) returns (stream DeleteCommandResponse); + + + + // Config: Manage General Configuration + + // Get the current Cloud Storage Configuration. + // + rpc CloudStorageConfiguration(CloudStorageConfigurationRequest) returns (stream CloudStorageConfigurationResponse); + + // Update the current Cloud Storage Configuration. + // + rpc UpdateCloudStorageConfiguration(UpdateCloudStorageConfigurationRequest) returns (stream UpdateCloudStorageConfigurationResponse); + + // ### Get the current status and content of custom welcome emails + // + rpc CustomWelcomeEmail(CustomWelcomeEmailRequest) returns (stream CustomWelcomeEmailResponse); + + // Update custom welcome email setting and values. Optionally send a test email with the new content to the currently logged in user. + // + rpc UpdateCustomWelcomeEmail(UpdateCustomWelcomeEmailRequest) returns (stream UpdateCustomWelcomeEmailResponse); + + // Requests to this endpoint will send a welcome email with the custom content provided in the body to the currently logged in user. 
+ // + rpc UpdateCustomWelcomeEmailTest(UpdateCustomWelcomeEmailTestRequest) returns (stream UpdateCustomWelcomeEmailTestResponse); + + // ### Retrieve the value for whether or not digest emails is enabled + // + rpc DigestEmailsEnabled(DigestEmailsEnabledRequest) returns (stream DigestEmailsEnabledResponse); + + // ### Update the setting for enabling/disabling digest emails + // + rpc UpdateDigestEmailsEnabled(UpdateDigestEmailsEnabledRequest) returns (stream UpdateDigestEmailsEnabledResponse); + + // ### Trigger the generation of digest email records and send them to Looker's internal system. This does not send + // any actual emails, it generates records containing content which may be of interest for users who have become inactive. + // Emails will be sent at a later time from Looker's internal system if the Digest Emails feature is enabled in settings. + rpc CreateDigestEmailSend(CreateDigestEmailSendRequest) returns (stream CreateDigestEmailSendResponse); + + // ### Set the menu item name and content for internal help resources + // + rpc InternalHelpResourcesContent(InternalHelpResourcesContentRequest) returns (stream InternalHelpResourcesContentResponse); + + // Update internal help resources content + // + rpc UpdateInternalHelpResourcesContent(UpdateInternalHelpResourcesContentRequest) returns (stream UpdateInternalHelpResourcesContentResponse); + + // ### Get and set the options for internal help resources + // + rpc InternalHelpResources(InternalHelpResourcesRequest) returns (stream InternalHelpResourcesResponse); + + // Update internal help resources settings + // + rpc UpdateInternalHelpResources(UpdateInternalHelpResourcesRequest) returns (stream UpdateInternalHelpResourcesResponse); + + // ### Get all legacy features. + // + rpc AllLegacyFeatures(AllLegacyFeaturesRequest) returns (stream AllLegacyFeaturesStreamResponse); + + // ### Get information about the legacy feature with a specific id. + // + rpc LegacyFeature(LegacyFeatureRequest) returns (stream LegacyFeatureResponse); + + // ### Update information about the legacy feature with a specific id. + // + rpc UpdateLegacyFeature(UpdateLegacyFeatureRequest) returns (stream UpdateLegacyFeatureResponse); + + // ### Get a list of locales that Looker supports. + // + rpc AllLocales(AllLocalesRequest) returns (stream AllLocalesStreamResponse); + + // ### Get a list of timezones that Looker supports (e.g. useful for scheduling tasks). + // + rpc AllTimezones(AllTimezonesRequest) returns (stream AllTimezonesStreamResponse); + + // ### Get information about all API versions supported by this Looker instance. + // + rpc Versions(VersionsRequest) returns (stream VersionsResponse); + + // ### This feature is enabled only by special license. + // ### Gets the whitelabel configuration, which includes hiding documentation links, custom favicon uploading, etc. + // + rpc WhitelabelConfiguration(WhitelabelConfigurationRequest) returns (stream WhitelabelConfigurationResponse); + + // ### Update the whitelabel configuration + // + rpc UpdateWhitelabelConfiguration(UpdateWhitelabelConfigurationRequest) returns (stream UpdateWhitelabelConfigurationResponse); + + + + // Connection: Manage Database Connections + + // ### Get information about all connections. + // + rpc AllConnections(AllConnectionsRequest) returns (stream AllConnectionsStreamResponse); + + // ### Create a connection using the specified configuration. 
+ // + rpc CreateConnection(CreateConnectionRequest) returns (stream CreateConnectionResponse); + + // ### Get information about a connection. + // + rpc Connection(ConnectionRequest) returns (stream ConnectionResponse); + + // ### Update a connection using the specified configuration. + // + rpc UpdateConnection(UpdateConnectionRequest) returns (stream UpdateConnectionResponse); + + // ### Delete a connection. + // + rpc DeleteConnection(DeleteConnectionRequest) returns (stream DeleteConnectionResponse); + + // ### Delete a connection override. + // + rpc DeleteConnectionOverride(DeleteConnectionOverrideRequest) returns (stream DeleteConnectionOverrideResponse); + + // ### Test an existing connection. + // + // Note that a connection's 'dialect' property has a 'connection_tests' property that lists the + // specific types of tests that the connection supports. + // + // This API is rate limited. + // + // Unsupported tests in the request will be ignored. + // + rpc TestConnection(TestConnectionRequest) returns (stream TestConnectionStreamResponse); + + // ### Test a connection configuration. + // + // Note that a connection's 'dialect' property has a 'connection_tests' property that lists the + // specific types of tests that the connection supports. + // + // This API is rate limited. + // + // Unsupported tests in the request will be ignored. + // + rpc TestConnectionConfig(TestConnectionConfigRequest) returns (stream TestConnectionConfigStreamResponse); + + // ### Get information about all dialects. + // + rpc AllDialectInfos(AllDialectInfosRequest) returns (stream AllDialectInfosStreamResponse); + + // ### Get all External OAuth Applications. + // + rpc AllExternalOauthApplications(AllExternalOauthApplicationsRequest) returns (stream AllExternalOauthApplicationsStreamResponse); + + // ### Create an OAuth Application using the specified configuration. + // + rpc CreateExternalOauthApplication(CreateExternalOauthApplicationRequest) returns (stream CreateExternalOauthApplicationResponse); + + // ### Get information about all SSH Servers. + // + rpc AllSshServers(AllSshServersRequest) returns (stream AllSshServersStreamResponse); + + // ### Create an SSH Server. + // + rpc CreateSshServer(CreateSshServerRequest) returns (stream CreateSshServerResponse); + + // ### Get information about an SSH Server. + // + rpc SshServer(SshServerRequest) returns (stream SshServerResponse); + + // ### Update an SSH Server. + // + rpc UpdateSshServer(UpdateSshServerRequest) returns (stream UpdateSshServerResponse); + + // ### Delete an SSH Server. + // + rpc DeleteSshServer(DeleteSshServerRequest) returns (stream DeleteSshServerResponse); + + // ### Test the SSH Server + // + rpc TestSshServer(TestSshServerRequest) returns (stream TestSshServerResponse); + + // ### Get information about all SSH Tunnels. + // + rpc AllSshTunnels(AllSshTunnelsRequest) returns (stream AllSshTunnelsStreamResponse); + + // ### Create an SSH Tunnel + // + rpc CreateSshTunnel(CreateSshTunnelRequest) returns (stream CreateSshTunnelResponse); + + // ### Get information about an SSH Tunnel. 
+ // + rpc SshTunnel(SshTunnelRequest) returns (stream SshTunnelResponse); + + // ### Update an SSH Tunnel + // + rpc UpdateSshTunnel(UpdateSshTunnelRequest) returns (stream UpdateSshTunnelResponse); + + // ### Delete an SSH Tunnel + // + rpc DeleteSshTunnel(DeleteSshTunnelRequest) returns (stream DeleteSshTunnelResponse); + + // ### Test the SSH Tunnel + // + rpc TestSshTunnel(TestSshTunnelRequest) returns (stream TestSshTunnelResponse); + + // ### Get the SSH public key + // + // Get the public key created for this instance to identify itself to a remote SSH server. + // + rpc SshPublicKey(SshPublicKeyRequest) returns (stream SshPublicKeyResponse); + + + + // Content: Manage Content + + // ### Search Favorite Content + // + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + rpc SearchContentFavorites(SearchContentFavoritesRequest) returns (stream SearchContentFavoritesStreamResponse); + + // ### Get favorite content by its id + rpc ContentFavorite(ContentFavoriteRequest) returns (stream ContentFavoriteResponse); + + // ### Delete favorite content + rpc DeleteContentFavorite(DeleteContentFavoriteRequest) returns (stream DeleteContentFavoriteResponse); + + // ### Create favorite content + rpc CreateContentFavorite(CreateContentFavoriteRequest) returns (stream CreateContentFavoriteResponse); + + // ### Get information about all content metadata in a space. + // + rpc AllContentMetadatas(AllContentMetadatasRequest) returns (stream AllContentMetadatasStreamResponse); + + // ### Get information about an individual content metadata record. + // + rpc ContentMetadata(ContentMetadataRequest) returns (stream ContentMetadataResponse); + + // ### Move a piece of content. + // + rpc UpdateContentMetadata(UpdateContentMetadataRequest) returns (stream UpdateContentMetadataResponse); + + // ### All content metadata access records for a content metadata item. + // + rpc AllContentMetadataAccesses(AllContentMetadataAccessesRequest) returns (stream AllContentMetadataAccessesStreamResponse); + + // ### Create content metadata access. + // + rpc CreateContentMetadataAccess(CreateContentMetadataAccessRequest) returns (stream CreateContentMetadataAccessResponse); + + // ### Update type of access for content metadata. + // + rpc UpdateContentMetadataAccess(UpdateContentMetadataAccessRequest) returns (stream UpdateContentMetadataAccessResponse); + + // ### Remove content metadata access. 
+ // + rpc DeleteContentMetadataAccess(DeleteContentMetadataAccessRequest) returns (stream DeleteContentMetadataAccessResponse); + + // ### Get an image representing the contents of a dashboard or look. + // + // The returned thumbnail is an abstract representation of the contents of a dashboard or look and does not + // reflect the actual data displayed in the respective visualizations. + // + rpc ContentThumbnail(ContentThumbnailRequest) returns (stream ContentThumbnailResponse); + + // ### Validate All Content + // + // Performs validation of all looks and dashboards + // Returns a list of errors found as well as metadata about the content validation run. + // + rpc ContentValidation(ContentValidationRequest) returns (stream ContentValidationResponse); + + // ### Search Content Views + // + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + rpc SearchContentViews(SearchContentViewsRequest) returns (stream SearchContentViewsStreamResponse); + + // ### Get a vector image representing the contents of a dashboard or look. + // + // # DEPRECATED: Use [content_thumbnail()](#!/Content/content_thumbnail) + // + // The returned thumbnail is an abstract representation of the contents of a dashboard or look and does not + // reflect the actual data displayed in the respective visualizations. + // + rpc VectorThumbnail(VectorThumbnailRequest) returns (stream VectorThumbnailResponse); + + + + // Dashboard: Manage Dashboards + + // ### Get information about all active dashboards. + // + // Returns an array of **abbreviated dashboard objects**. Dashboards marked as deleted are excluded from this list. + // + // Get the **full details** of a specific dashboard by id with [dashboard()](#!/Dashboard/dashboard) + // + // Find **deleted dashboards** with [search_dashboards()](#!/Dashboard/search_dashboards) + // + rpc AllDashboards(AllDashboardsRequest) returns (stream AllDashboardsStreamResponse); + + // ### Create a new dashboard + // + // Creates a new dashboard object and returns the details of the newly created dashboard. + // + // `Title`, `user_id`, and `space_id` are all required fields. + // `Space_id` and `user_id` must contain the id of an existing space or user, respectively. + // A dashboard's `title` must be unique within the space in which it resides.
+ // + // If you receive a 422 error response when creating a dashboard, be sure to look at the + // response body for information about exactly which fields are missing or contain invalid data. + // + // You can **update** an existing dashboard with [update_dashboard()](#!/Dashboard/update_dashboard) + // + // You can **permanently delete** an existing dashboard with [delete_dashboard()](#!/Dashboard/delete_dashboard) + // + rpc CreateDashboard(CreateDashboardRequest) returns (stream CreateDashboardResponse); + + // ### Search Dashboards + // + // Returns an **array of dashboard objects** that match the specified search criteria. + // + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + // The parameters `limit` and `offset` are recommended for fetching results in page-size chunks. + // + // Get a **single dashboard** by id with [dashboard()](#!/Dashboard/dashboard) + // + rpc SearchDashboards(SearchDashboardsRequest) returns (stream SearchDashboardsStreamResponse); + + // ### Import a LookML dashboard to a space as a UDD + // Creates a UDD (a dashboard which exists in the Looker database rather than as a LookML file) from the LookML dashboard + // and puts it in the space specified. The created UDD will have a lookml_link_id which links to the original LookML dashboard. + // + // To give the imported dashboard a different title, specify it (e.g. title: "my title") in the body of your request; otherwise the imported + // dashboard will have the same title as the original LookML dashboard. + // + // For this operation to succeed, the user must have permission to see the LookML dashboard in question, and have permission to + // create content in the space the dashboard is being imported to. + // + // **Sync** a linked UDD with [sync_lookml_dashboard()](#!/Dashboard/sync_lookml_dashboard) + // **Unlink** a linked UDD by setting lookml_link_id to null with [update_dashboard()](#!/Dashboard/update_dashboard) + // + rpc ImportLookmlDashboard(ImportLookmlDashboardRequest) returns (stream ImportLookmlDashboardResponse); + + // ### Update all linked dashboards to match the specified LookML dashboard. + // + // Any UDD (a dashboard which exists in the Looker database rather than as a LookML file) which has a `lookml_link_id` + // property value referring to a LookML dashboard's id (model::dashboardname) will be updated so that it matches the current state of the LookML dashboard.
+ // + // For this operation to succeed, the user must have permission to view the LookML dashboard, and only linked dashboards + // that the user has permission to update will be synced. + // + // To **link** or **unlink** a UDD, set the `lookml_link_id` property with [update_dashboard()](#!/Dashboard/update_dashboard) + // + rpc SyncLookmlDashboard(SyncLookmlDashboardRequest) returns (stream SyncLookmlDashboardStreamResponse); + + // ### Get information about a dashboard + // + // Returns the full details of the identified dashboard object + // + // Get a **summary list** of all active dashboards with [all_dashboards()](#!/Dashboard/all_dashboards) + // + // You can **Search** for dashboards with [search_dashboards()](#!/Dashboard/search_dashboards) + // + rpc Dashboard(DashboardRequest) returns (stream DashboardResponse); + + // ### Update a dashboard + // + // You can use this function to change the string and integer properties of + // a dashboard. Nested objects such as filters, dashboard elements, or dashboard layout components + // cannot be modified by this function - use the update functions for the respective + // nested object types (like [update_dashboard_filter()](#!/3.1/Dashboard/update_dashboard_filter) to change a filter) + // to modify nested objects referenced by a dashboard. + // + // If you receive a 422 error response when updating a dashboard, be sure to look at the + // response body for information about exactly which fields are missing or contain invalid data. + // + rpc UpdateDashboard(UpdateDashboardRequest) returns (stream UpdateDashboardResponse); + + // ### Delete the dashboard with the specified id + // + // Permanently **deletes** a dashboard. (The dashboard cannot be recovered after this operation.) + // + // "Soft" delete or hide a dashboard by setting its `deleted` status to `True` with [update_dashboard()](#!/Dashboard/update_dashboard). + // + // Note: When a dashboard is deleted in the UI, it is soft deleted. Use this API call to permanently remove it, if desired. + // + rpc DeleteDashboard(DeleteDashboardRequest) returns (stream DeleteDashboardResponse); + + // ### Get Aggregate Table LookML for Each Query on a Dashboard + // + // Returns a JSON object that contains the dashboard id and Aggregate Table lookml + // + // + rpc DashboardAggregateTableLookml(DashboardAggregateTableLookmlRequest) returns (stream DashboardAggregateTableLookmlResponse); + + // ### Get lookml of a UDD + // + // Returns a JSON object that contains the dashboard id and the full lookml + // + // + rpc DashboardLookml(DashboardLookmlRequest) returns (stream DashboardLookmlResponse); + + // ### Search Dashboard Elements + // + // Returns an **array of DashboardElement objects** that match the specified search criteria. + // + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values.
The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + rpc SearchDashboardElements(SearchDashboardElementsRequest) returns (stream SearchDashboardElementsStreamResponse); + + // ### Get information about the dashboard element with a specific id. + rpc DashboardElement(DashboardElementRequest) returns (stream DashboardElementResponse); + + // ### Update the dashboard element with a specific id. + rpc UpdateDashboardElement(UpdateDashboardElementRequest) returns (stream UpdateDashboardElementResponse); + + // ### Delete a dashboard element with a specific id. + rpc DeleteDashboardElement(DeleteDashboardElementRequest) returns (stream DeleteDashboardElementResponse); + + // ### Get information about all the dashboard elements on a dashboard with a specific id. + rpc DashboardDashboardElements(DashboardDashboardElementsRequest) returns (stream DashboardDashboardElementsStreamResponse); + + // ### Create a dashboard element on the dashboard with a specific id. + rpc CreateDashboardElement(CreateDashboardElementRequest) returns (stream CreateDashboardElementResponse); + + // ### Get information about the dashboard filters with a specific id. + rpc DashboardFilter(DashboardFilterRequest) returns (stream DashboardFilterResponse); + + // ### Update the dashboard filter with a specific id. + rpc UpdateDashboardFilter(UpdateDashboardFilterRequest) returns (stream UpdateDashboardFilterResponse); + + // ### Delete a dashboard filter with a specific id. + rpc DeleteDashboardFilter(DeleteDashboardFilterRequest) returns (stream DeleteDashboardFilterResponse); + + // ### Get information about all the dashboard filters on a dashboard with a specific id. + rpc DashboardDashboardFilters(DashboardDashboardFiltersRequest) returns (stream DashboardDashboardFiltersStreamResponse); + + // ### Create a dashboard filter on the dashboard with a specific id. + rpc CreateDashboardFilter(CreateDashboardFilterRequest) returns (stream CreateDashboardFilterResponse); + + // ### Get information about the dashboard elements with a specific id. + rpc DashboardLayoutComponent(DashboardLayoutComponentRequest) returns (stream DashboardLayoutComponentResponse); + + // ### Update the dashboard element with a specific id. + rpc UpdateDashboardLayoutComponent(UpdateDashboardLayoutComponentRequest) returns (stream UpdateDashboardLayoutComponentResponse); + + // ### Get information about all the dashboard layout components for a dashboard layout with a specific id. + rpc DashboardLayoutDashboardLayoutComponents(DashboardLayoutDashboardLayoutComponentsRequest) returns (stream DashboardLayoutDashboardLayoutComponentsStreamResponse); + + // ### Get information about the dashboard layouts with a specific id. + rpc DashboardLayout(DashboardLayoutRequest) returns (stream DashboardLayoutResponse); + + // ### Update the dashboard layout with a specific id. + rpc UpdateDashboardLayout(UpdateDashboardLayoutRequest) returns (stream UpdateDashboardLayoutResponse); + + // ### Delete a dashboard layout with a specific id. + rpc DeleteDashboardLayout(DeleteDashboardLayoutRequest) returns (stream DeleteDashboardLayoutResponse); + + // ### Get information about all the dashboard elements on a dashboard with a specific id. 
+ rpc DashboardDashboardLayouts(DashboardDashboardLayoutsRequest) returns (stream DashboardDashboardLayoutsStreamResponse); + + // ### Create a dashboard layout on the dashboard with a specific id. + rpc CreateDashboardLayout(CreateDashboardLayoutRequest) returns (stream CreateDashboardLayoutResponse); + + + + // DataAction: Run Data Actions + + // Perform a data action. The data action object can be obtained from query results, and used to perform an arbitrary action. + rpc PerformDataAction(PerformDataActionRequest) returns (stream PerformDataActionResponse); + + // For some data actions, the remote server may supply a form requesting further user input. This endpoint takes a data action, asks the remote server to generate a form for it, and returns that form to you for presentation to the user. + rpc FetchRemoteDataActionForm(FetchRemoteDataActionFormRequest) returns (stream FetchRemoteDataActionFormResponse); + + + + // Datagroup: Manage Datagroups + + // ### Get information about all datagroups. + // + rpc AllDatagroups(AllDatagroupsRequest) returns (stream AllDatagroupsStreamResponse); + + // ### Get information about a datagroup. + // + rpc Datagroup(DatagroupRequest) returns (stream DatagroupResponse); + + // ### Update a datagroup using the specified params. + // + rpc UpdateDatagroup(UpdateDatagroupRequest) returns (stream UpdateDatagroupResponse); + + + + // Folder: Manage Folders + + // Search for folders by creator id, parent id, name, etc + rpc SearchFolders(SearchFoldersRequest) returns (stream SearchFoldersStreamResponse); + + // ### Get information about the folder with a specific id. + rpc Folder(FolderRequest) returns (stream FolderResponse); + + // ### Update the folder with a specific id. + rpc UpdateFolder(UpdateFolderRequest) returns (stream UpdateFolderResponse); + + // ### Delete the folder with a specific id including any children folders. + // **DANGER** this will delete all looks and dashboards in the folder. + // + rpc DeleteFolder(DeleteFolderRequest) returns (stream DeleteFolderResponse); + + // ### Get information about all folders. + // + // In API 3.x, this will not return empty personal folders, unless they belong to the calling user. + // In API 4.0+, all personal folders will be returned. + // + // + rpc AllFolders(AllFoldersRequest) returns (stream AllFoldersStreamResponse); + + // ### Create a folder with specified information. + // + // Caller must have permission to edit the parent folder and to create folders, otherwise the request + // returns 404 Not Found. + // + rpc CreateFolder(CreateFolderRequest) returns (stream CreateFolderResponse); + + // ### Get the children of a folder. + rpc FolderChildren(FolderChildrenRequest) returns (stream FolderChildrenStreamResponse); + + // ### Search the children of a folder + rpc FolderChildrenSearch(FolderChildrenSearchRequest) returns (stream FolderChildrenSearchStreamResponse); + + // ### Get the parent of a folder + rpc FolderParent(FolderParentRequest) returns (stream FolderParentResponse); + + // ### Get the ancestors of a folder + rpc FolderAncestors(FolderAncestorsRequest) returns (stream FolderAncestorsStreamResponse); + + // ### Get all looks in a folder. + // In API 3.x, this will return all looks in a folder, including looks in the trash. + // In API 4.0+, all looks in a folder will be returned, excluding looks in the trash. 
+ // + rpc FolderLooks(FolderLooksRequest) returns (stream FolderLooksStreamResponse); + + // ### Get the dashboards in a folder + rpc FolderDashboards(FolderDashboardsRequest) returns (stream FolderDashboardsStreamResponse); + + + + // Group: Manage Groups + + // ### Get information about all groups. + // + rpc AllGroups(AllGroupsRequest) returns (stream AllGroupsStreamResponse); + + // ### Creates a new group (admin only). + // + rpc CreateGroup(CreateGroupRequest) returns (stream CreateGroupResponse); + + // ### Search groups + // + // Returns all group records that match the given search criteria. + // + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + rpc SearchGroups(SearchGroupsRequest) returns (stream SearchGroupsStreamResponse); + + // ### Search groups include roles + // + // Returns all group records that match the given search criteria, and attaches any associated roles. + // + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + rpc SearchGroupsWithRoles(SearchGroupsWithRolesRequest) returns (stream SearchGroupsWithRolesStreamResponse); + + // ### Search groups include hierarchy + // + // Returns all group records that match the given search criteria, and attaches + // associated role_ids and parent group_ids. 
+ // + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + rpc SearchGroupsWithHierarchy(SearchGroupsWithHierarchyRequest) returns (stream SearchGroupsWithHierarchyStreamResponse); + + // ### Get information about a group. + // + rpc Group(GroupRequest) returns (stream GroupResponse); + + // ### Updates the a group (admin only). + rpc UpdateGroup(UpdateGroupRequest) returns (stream UpdateGroupResponse); + + // ### Deletes a group (admin only). + // + rpc DeleteGroup(DeleteGroupRequest) returns (stream DeleteGroupResponse); + + // ### Get information about all the groups in a group + // + rpc AllGroupGroups(AllGroupGroupsRequest) returns (stream AllGroupGroupsStreamResponse); + + // ### Adds a new group to a group. + // + rpc AddGroupGroup(AddGroupGroupRequest) returns (stream AddGroupGroupResponse); + + // ### Get information about all the users directly included in a group. + // + rpc AllGroupUsers(AllGroupUsersRequest) returns (stream AllGroupUsersStreamResponse); + + // ### Adds a new user to a group. + // + rpc AddGroupUser(AddGroupUserRequest) returns (stream AddGroupUserResponse); + + // ### Removes a user from a group. + // + rpc DeleteGroupUser(DeleteGroupUserRequest) returns (stream DeleteGroupUserResponse); + + // ### Removes a group from a group. + // + rpc DeleteGroupFromGroup(DeleteGroupFromGroupRequest) returns (stream DeleteGroupFromGroupResponse); + + // ### Set the value of a user attribute for a group. + // + // For information about how user attribute values are calculated, see [Set User Attribute Group Values](#!/UserAttribute/set_user_attribute_group_values). + // + rpc UpdateUserAttributeGroupValue(UpdateUserAttributeGroupValueRequest) returns (stream UpdateUserAttributeGroupValueResponse); + + // ### Remove a user attribute value from a group. + // + rpc DeleteUserAttributeGroupValue(DeleteUserAttributeGroupValueRequest) returns (stream DeleteUserAttributeGroupValueResponse); + + + + // Homepage: Manage Homepage + + // ### Get information about the primary homepage's sections. + // + rpc AllPrimaryHomepageSections(AllPrimaryHomepageSectionsRequest) returns (stream AllPrimaryHomepageSectionsStreamResponse); + + + + // Integration: Manage Integrations + + // ### Get information about all Integration Hubs. + // + rpc AllIntegrationHubs(AllIntegrationHubsRequest) returns (stream AllIntegrationHubsStreamResponse); + + // ### Create a new Integration Hub. 
+ // + // This API is rate limited to prevent it from being used for SSRF attacks. + // + rpc CreateIntegrationHub(CreateIntegrationHubRequest) returns (stream CreateIntegrationHubResponse); + + // ### Get information about an Integration Hub. + // + rpc IntegrationHub(IntegrationHubRequest) returns (stream IntegrationHubResponse); + + // ### Update an Integration Hub definition. + // + // This API is rate limited to prevent it from being used for SSRF attacks. + // + rpc UpdateIntegrationHub(UpdateIntegrationHubRequest) returns (stream UpdateIntegrationHubResponse); + + // ### Delete an Integration Hub. + // + rpc DeleteIntegrationHub(DeleteIntegrationHubRequest) returns (stream DeleteIntegrationHubResponse); + + // Accepts the legal agreement for a given integration hub. This only works for integration hubs that have legal_agreement_required set to true and legal_agreement_signed set to false. + rpc AcceptIntegrationHubLegalAgreement(AcceptIntegrationHubLegalAgreementRequest) returns (stream AcceptIntegrationHubLegalAgreementResponse); + + // ### Get information about all Integrations. + // + rpc AllIntegrations(AllIntegrationsRequest) returns (stream AllIntegrationsStreamResponse); + + // ### Get information about an Integration. + // + rpc Integration(IntegrationRequest) returns (stream IntegrationResponse); + + // ### Update parameters on an Integration. + // + rpc UpdateIntegration(UpdateIntegrationRequest) returns (stream UpdateIntegrationResponse); + + // Returns the Integration form for presentation to the user. + rpc FetchIntegrationForm(FetchIntegrationFormRequest) returns (stream FetchIntegrationFormResponse); + + // Tests the integration to make sure all the settings are working. + rpc TestIntegration(TestIntegrationRequest) returns (stream TestIntegrationResponse); + + + + // Look: Run and Manage Looks + + // ### Get information about all active Looks + // + // Returns an array of **abbreviated Look objects** describing all the looks that the caller has access to. Soft-deleted Looks are **not** included. + // + // Get the **full details** of a specific look by id with [look(id)](#!/Look/look) + // + // Find **soft-deleted looks** with [search_looks()](#!/Look/search_looks) + // + rpc AllLooks(AllLooksRequest) returns (stream AllLooksStreamResponse); + + // ### Create a Look + // + // To create a look to display query data, first create the query with [create_query()](#!/Query/create_query) + // then assign the query's id to the `query_id` property in the call to `create_look()`. + // + // To place the look into a particular space, assign the space's id to the `space_id` property + // in the call to `create_look()`. + // + rpc CreateLook(CreateLookRequest) returns (stream CreateLookResponse); + + // ### Search Looks + // + // Returns an **array of Look objects** that match the specified search criteria. + // + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions.
+ // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + // Get a **single look** by id with [look(id)](#!/Look/look) + // + rpc SearchLooks(SearchLooksRequest) returns (stream SearchLooksStreamResponse); + + // ### Get a Look. + // + // Returns detailed information about a Look and its associated Query. + // + // + rpc Look(LookRequest) returns (stream LookResponse); + + // ### Modify a Look + // + // Use this function to modify parts of a look. Property values given in a call to `update_look` are + // applied to the existing look, so there's no need to include properties whose values are not changing. + // It's best to specify only the properties you want to change and leave everything else out + // of your `update_look` call. **Look properties marked 'read-only' will be ignored.** + // + // When a user deletes a look in the Looker UI, the look data remains in the database but is + // marked with a deleted flag ("soft-deleted"). Soft-deleted looks can be undeleted (by an admin) + // if the delete was in error. + // + // To soft-delete a look via the API, use [update_look()](#!/Look/update_look) to change the look's `deleted` property to `true`. + // You can undelete a look by calling `update_look` to change the look's `deleted` property to `false`. + // + // Soft-deleted looks are excluded from the results of [all_looks()](#!/Look/all_looks) and [search_looks()](#!/Look/search_looks), so they + // essentially disappear from view even though they still reside in the db. + // In API 3.1 and later, you can pass `deleted: true` as a parameter to [search_looks()](#!/3.1/Look/search_looks) to list soft-deleted looks. + // + // NOTE: [delete_look()](#!/Look/delete_look) performs a "hard delete" - the look data is removed from the Looker + // database and destroyed. There is no "undo" for `delete_look()`. + // + rpc UpdateLook(UpdateLookRequest) returns (stream UpdateLookResponse); + + // ### Permanently Delete a Look + // + // This operation **permanently** removes a look from the Looker database. + // + // NOTE: There is no "undo" for this kind of delete. + // + // For information about soft-delete (which can be undone) see [update_look()](#!/Look/update_look). + // + rpc DeleteLook(DeleteLookRequest) returns (stream DeleteLookResponse); + + // ### Run a Look + // + // Runs a given look's query and returns the results in the requested format. 
+ // + // Supported formats: + // + // | result_format | Description + // | :-----------: | :--- | + // | json | Plain json + // | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + // | csv | Comma separated values with a header + // | txt | Tab separated values with a header + // | html | Simple html + // | md | Simple markdown + // | xlsx | MS Excel spreadsheet + // | sql | Returns the generated SQL rather than running the query + // | png | A PNG image of the visualization of the query + // | jpg | A JPG image of the visualization of the query + // + // + // + rpc RunLook(RunLookRequest) returns (stream RunLookResponse); + + + + // LookmlModel: Manage LookML Models + + // ### Get information about all lookml models. + // + rpc AllLookmlModels(AllLookmlModelsRequest) returns (stream AllLookmlModelsStreamResponse); + + // ### Create a lookml model using the specified configuration. + // + rpc CreateLookmlModel(CreateLookmlModelRequest) returns (stream CreateLookmlModelResponse); + + // ### Get information about a lookml model. + // + rpc LookmlModel(LookmlModelRequest) returns (stream LookmlModelResponse); + + // ### Update a lookml model using the specified configuration. + // + rpc UpdateLookmlModel(UpdateLookmlModelRequest) returns (stream UpdateLookmlModelResponse); + + // ### Delete a lookml model. + // + rpc DeleteLookmlModel(DeleteLookmlModelRequest) returns (stream DeleteLookmlModelResponse); + + // ### Get information about a lookml model explore. + // + rpc LookmlModelExplore(LookmlModelExploreRequest) returns (stream LookmlModelExploreResponse); + + + + // Metadata: Connection Metadata Features + + // ### Field name suggestions for a model and view + // + // + rpc ModelFieldnameSuggestions(ModelFieldnameSuggestionsRequest) returns (stream ModelFieldnameSuggestionsResponse); + + // ### List databases available to this connection + // + // Certain dialects can support multiple databases per single connection. + // If this connection supports multiple databases, the database names will be returned in an array. + // + // Connections using dialects that do not support multiple databases will return an empty array. + // + // **Note**: [Connection Features](#!/Metadata/connection_features) can be used to determine if a connection supports + // multiple databases. + // + rpc ConnectionDatabases(ConnectionDatabasesRequest) returns (stream ConnectionDatabasesStreamResponse); + + // ### Retrieve metadata features for this connection + // + // Returns a list of feature names with `true` (available) or `false` (not available) + // + // + rpc ConnectionFeatures(ConnectionFeaturesRequest) returns (stream ConnectionFeaturesResponse); + + // ### Get the list of schemas and tables for a connection + // + // + rpc ConnectionSchemas(ConnectionSchemasRequest) returns (stream ConnectionSchemasStreamResponse); + + // ### Get the list of tables for a schema + // + // For dialects that support multiple databases, optionally identify which to use. If not provided, the default + // database for the connection will be used. 
+ // + // For dialects that do **not** support multiple databases, **do not use** the database parameter + // + rpc ConnectionTables(ConnectionTablesRequest) returns (stream ConnectionTablesStreamResponse); + + // ### Get the columns (and therefore also the tables) in a specific schema + // + // + rpc ConnectionColumns(ConnectionColumnsRequest) returns (stream ConnectionColumnsStreamResponse); + + // ### Search a connection for columns matching the specified name + // + // **Note**: `column_name` must be a valid column name. It is not a search pattern. + // + rpc ConnectionSearchColumns(ConnectionSearchColumnsRequest) returns (stream ConnectionSearchColumnsStreamResponse); + + // ### Connection cost estimating + // + // Assign a `sql` statement to the body of the request. e.g., for Ruby, `{sql: 'select * from users'}` + // + // **Note**: If the connection's dialect has no support for cost estimates, an error will be returned + // + rpc ConnectionCostEstimate(ConnectionCostEstimateRequest) returns (stream ConnectionCostEstimateResponse); + + + + // Project: Manage Projects + + // ### Generate Lockfile for All LookML Dependencies + // + // Git must have been configured, must be in dev mode and deploy permission required + // + // Install_all is a two step process + // 1. For each remote_dependency in a project the dependency manager will resolve any ambiguous ref. + // 2. The project will then write out a lockfile including each remote_dependency with its resolved ref. + // + // + rpc LockAll(LockAllRequest) returns (stream LockAllResponse); + + // ### Get All Git Branches + // + // Returns a list of git branches in the project repository + // + rpc AllGitBranches(AllGitBranchesRequest) returns (stream AllGitBranchesStreamResponse); + + // ### Get the Current Git Branch + // + // Returns the git branch currently checked out in the given project repository + // + rpc GitBranch(GitBranchRequest) returns (stream GitBranchResponse); + + // ### Checkout and/or reset --hard an existing Git Branch + // + // Only allowed in development mode + // - Call `update_session` to select the 'dev' workspace. + // + // Checkout an existing branch if name field is different from the name of the currently checked out branch. + // + // Optionally specify a branch name, tag name or commit SHA to which the branch should be reset. + // **DANGER** hard reset will be force pushed to the remote. Unsaved changes and commits may be permanently lost. + // + // + rpc UpdateGitBranch(UpdateGitBranchRequest) returns (stream UpdateGitBranchResponse); + + // ### Create and Checkout a Git Branch + // + // Creates and checks out a new branch in the given project repository + // Only allowed in development mode + // - Call `update_session` to select the 'dev' workspace. + // + // Optionally specify a branch name, tag name or commit SHA as the start point in the ref field. + // If no ref is specified, HEAD of the current branch will be used as the start point for the new branch. 
+ // + // + rpc CreateGitBranch(CreateGitBranchRequest) returns (stream CreateGitBranchResponse); + + // ### Get the specified Git Branch + // + // Returns the git branch specified in branch_name path param if it exists in the given project repository + // + rpc FindGitBranch(FindGitBranchRequest) returns (stream FindGitBranchResponse); + + // ### Delete the specified Git Branch + // + // Delete git branch specified in branch_name path param from local and remote of specified project repository + // + rpc DeleteGitBranch(DeleteGitBranchRequest) returns (stream DeleteGitBranchResponse); + + // ### Deploy a Remote Branch or Ref to Production + // + // Git must have been configured and deploy permission required. + // + // Deploy is a one/two step process + // 1. If this is the first deploy of this project, create the production project with git repository. + // 2. Pull the branch or ref into the production project. + // + // Can only specify either a branch or a ref. + // + // + rpc DeployRefToProduction(DeployRefToProductionRequest) returns (stream DeployRefToProductionResponse); + + // ### Deploy LookML from this Development Mode Project to Production + // + // Git must have been configured, must be in dev mode and deploy permission required + // + // Deploy is a two / three step process: + // + // 1. Push commits in current branch of dev mode project to the production branch (origin/master). + // Note a. This step is skipped in read-only projects. + // Note b. If this step is unsuccessful for any reason (e.g. rejected non-fastforward because production branch has + // commits not in current branch), subsequent steps will be skipped. + // 2. If this is the first deploy of this project, create the production project with git repository. + // 3. Pull the production branch into the production project. + // + // + rpc DeployToProduction(DeployToProductionRequest) returns (stream DeployToProductionResponse); + + // ### Reset a project to the revision of the project that is in production. + // + // **DANGER** this will delete any changes that have not been pushed to a remote repository. + // + rpc ResetProjectToProduction(ResetProjectToProductionRequest) returns (stream ResetProjectToProductionResponse); + + // ### Reset a project development branch to the revision of the project that is on the remote. + // + // **DANGER** this will delete any changes that have not been pushed to a remote repository. + // + rpc ResetProjectToRemote(ResetProjectToRemoteRequest) returns (stream ResetProjectToRemoteResponse); + + // ### Get All Projects + // + // Returns all projects visible to the current user + // + rpc AllProjects(AllProjectsRequest) returns (stream AllProjectsStreamResponse); + + // ### Create A Project + // + // dev mode required. + // - Call `update_session` to select the 'dev' workspace. + // + // `name` is required. + // `git_remote_url` is not allowed. To configure Git for the newly created project, follow the instructions in `update_project`. + // + // + rpc CreateProject(CreateProjectRequest) returns (stream CreateProjectResponse); + + // ### Get A Project + // + // Returns the project with the given project id + // + rpc Project(ProjectRequest) returns (stream ProjectResponse); + + // ### Update Project Configuration + // + // Apply changes to a project's configuration. + // + // + // #### Configuring Git for a Project + // + // To set up a Looker project with a remote git repository, follow these steps: + // + // 1. Call `update_session` to select the 'dev' workspace. + // 1. 
Call `create_git_deploy_key` to create a new deploy key for the project + 1. Copy the deploy key text into the remote git repository's ssh key configuration + 1. Call `update_project` to set the project's `git_remote_url` (and `git_service_name`, if necessary). + // + // When you modify a project's `git_remote_url`, Looker connects to the remote repository to fetch + // metadata. The remote git repository MUST be configured with the Looker-generated deploy + // key for this project prior to setting the project's `git_remote_url`. + // + // To set up a Looker project with a git repository residing on the Looker server (a 'bare' git repo): + // + // 1. Call `update_session` to select the 'dev' workspace. + // 1. Call `update_project` setting `git_remote_url` to null and `git_service_name` to "bare". + // + // + rpc UpdateProject(UpdateProjectRequest) returns (stream UpdateProjectResponse); + + // ### Get A Project's Manifest object + // + // Returns the project with the given project id + // + rpc Manifest(ManifestRequest) returns (stream ManifestResponse); + + // ### Git Deploy Key + // + // Returns the ssh public key previously created for a project's git repository. + // + rpc GitDeployKey(GitDeployKeyRequest) returns (stream GitDeployKeyResponse); + + // ### Create Git Deploy Key + // + // Create a public/private key pair for authenticating ssh git requests from Looker to a remote git repository + // for a particular Looker project. + // + // Returns the public key of the generated ssh key pair. + // + // Copy this public key to your remote git repository's ssh keys configuration so that the remote git service can + // validate and accept git requests from the Looker server. + // + rpc CreateGitDeployKey(CreateGitDeployKeyRequest) returns (stream CreateGitDeployKeyResponse); + + // ### Get Cached Project Validation Results + // + // Returns the cached results of a previous project validation calculation, if any. + // Returns http status 204 No Content if no validation results exist. + // + // Validating the content of all the files in a project can be computationally intensive + // for large projects. Use this API to simply fetch the results of the most recent + // project validation rather than revalidating the entire project from scratch. + // + // A value of `"stale": true` in the response indicates that the project has changed since + // the cached validation results were computed. The cached validation results may no longer + // reflect the current state of the project. + // + rpc ProjectValidationResults(ProjectValidationResultsRequest) returns (stream ProjectValidationResultsResponse); + + // ### Validate Project + // + // Performs lint validation of all lookml files in the project. + // Returns a list of errors found, if any. + // + // Validating the content of all the files in a project can be computationally intensive + // for large projects. For best performance, call `validate_project(project_id)` only + // when you really want to recompute project validation.
To quickly display the results of + // the most recent project validation (without recomputing), use `project_validation_results(project_id)` + // + rpc ValidateProject(ValidateProjectRequest) returns (stream ValidateProjectResponse); + + // ### Get Project Workspace + // + // Returns information about the state of the project files in the currently selected workspace + // + rpc ProjectWorkspace(ProjectWorkspaceRequest) returns (stream ProjectWorkspaceResponse); + + // ### Get All Project Files + // + // Returns a list of the files in the project + // + rpc AllProjectFiles(AllProjectFilesRequest) returns (stream AllProjectFilesStreamResponse); + + // ### Get Project File Info + // + // Returns information about a file in the project + // + rpc ProjectFile(ProjectFileRequest) returns (stream ProjectFileResponse); + + // ### Get All Git Connection Tests + // + // dev mode required. + // - Call `update_session` to select the 'dev' workspace. + // + // Returns a list of tests which can be run against a project's (or the dependency project for the provided remote_url) git connection. Call [Run Git Connection Test](#!/Project/run_git_connection_test) to execute each test in sequence. + // + // Tests are ordered by increasing specificity. Tests should be run in the order returned because later tests require functionality tested by tests earlier in the test list. + // + // For example, a late-stage test for write access is meaningless if connecting to the git server (an early test) is failing. + // + rpc AllGitConnectionTests(AllGitConnectionTestsRequest) returns (stream AllGitConnectionTestsStreamResponse); + + // ### Run a git connection test + // + // Run the named test on the git service used by this project (or the dependency project for the provided remote_url) and return the result. This + // is intended to help debug git connections when things do not work properly, to give + // more helpful information about why a git url is not working with Looker. + // + // Tests should be run in the order they are returned by [Get All Git Connection Tests](#!/Project/all_git_connection_tests). + // + rpc RunGitConnectionTest(RunGitConnectionTestRequest) returns (stream RunGitConnectionTestResponse); + + // ### Get All LookML Tests + // + // Returns a list of tests which can be run to validate a project's LookML code and/or the underlying data, + // optionally filtered by the file id. + // Call [Run LookML Test](#!/Project/run_lookml_test) to execute tests. + // + rpc AllLookmlTests(AllLookmlTestsRequest) returns (stream AllLookmlTestsStreamResponse); + + // ### Run LookML Tests + // + // Runs all tests in the project, optionally filtered by file, test, and/or model. + // + rpc RunLookmlTest(RunLookmlTestRequest) returns (stream RunLookmlTestStreamResponse); + + // ### Configure Repository Credential for a remote dependency + // + // Admin required. + // + // `root_project_id` is required. + // `credential_id` is required. + // + // + rpc UpdateRepositoryCredential(UpdateRepositoryCredentialRequest) returns (stream UpdateRepositoryCredentialResponse); + + // ### Repository Credential for a remote dependency + // + // Admin required. + // + // `root_project_id` is required. + // `credential_id` is required. + // + rpc DeleteRepositoryCredential(DeleteRepositoryCredentialRequest) returns (stream DeleteRepositoryCredentialResponse); + + // ### Get all Repository Credentials for a project + // + // `root_project_id` is required. 
+ // + rpc GetAllRepositoryCredentials(GetAllRepositoryCredentialsRequest) returns (stream GetAllRepositoryCredentialsStreamResponse); + + + + // Query: Run and Manage Queries + + // ### Create an async query task + // + // Creates a query task (job) to run a previously created query asynchronously. Returns a Query Task ID. + // + // Use [query_task(query_task_id)](#!/Query/query_task) to check the execution status of the query task. + // After the query task status reaches "Complete", use [query_task_results(query_task_id)](#!/Query/query_task_results) to fetch the results of the query. + // + rpc CreateQueryTask(CreateQueryTaskRequest) returns (stream CreateQueryTaskResponse); + + // ### Fetch results of multiple async queries + // + // Returns the results of multiple async queries in one request. + // + // For Query Tasks that are not completed, the response will include the execution status of the Query Task but will not include query results. + // Query Tasks whose results have expired will have a status of 'expired'. + // If the user making the API request does not have sufficient privileges to view a Query Task result, the result will have a status of 'missing' + // + rpc QueryTaskMultiResults(QueryTaskMultiResultsRequest) returns (stream QueryTaskMultiResultsResponse); + + // ### Get Query Task details + // + // Use this function to check the status of an async query task. After the status + // reaches "Complete", you can call [query_task_results(query_task_id)](#!/Query/query_task_results) to + // retrieve the results of the query. + // + // Use [create_query_task()](#!/Query/create_query_task) to create an async query task. + // + rpc QueryTask(QueryTaskRequest) returns (stream QueryTaskResponse); + + // ### Get Async Query Results + // + // Returns the results of an async query task if the query has completed. + // + // If the query task is still running or waiting to run, this function returns 204 No Content. + // + // If the query task ID is invalid or the cached results of the query task have expired, this function returns 404 Not Found. + // + // Use [query_task(query_task_id)](#!/Query/query_task) to check the execution status of the query task + // Call query_task_results only after the query task status reaches "Complete". + // + // You can also use [query_task_multi_results()](#!/Query/query_task_multi_results) retrieve the + // results of multiple async query tasks at the same time. + // + // #### SQL Error Handling: + // If the query fails due to a SQL db error, how this is communicated depends on the result_format you requested in `create_query_task()`. + // + // For `json_detail` result_format: `query_task_results()` will respond with HTTP status '200 OK' and db SQL error info + // will be in the `errors` property of the response object. The 'data' property will be empty. + // + // For all other result formats: `query_task_results()` will respond with HTTP status `400 Bad Request` and some db SQL error info + // will be in the message of the 400 error response, but not as detailed as expressed in `json_detail.errors`. + // These data formats can only carry row data, and error info is not row data. + // + rpc QueryTaskResults(QueryTaskResultsRequest) returns (stream QueryTaskResultsResponse); + + // ### Get a previously created query by id. + // + // A Looker query object includes the various parameters that define a database query that has been run or + // could be run in the future. These parameters include: model, view, fields, filters, pivots, etc. 
+ // Query *results* are not part of the query object. + // + // Query objects are unique and immutable. Query objects are created automatically in Looker as users explore data. + // Looker does not delete them; they become part of the query history. When asked to create a query for + // any given set of parameters, Looker will first try to find an existing query object with matching + // parameters and will only create a new object when an appropriate object can not be found. + // + // This 'get' method is used to get the details about a query for a given id. See the other methods here + // to 'create' and 'run' queries. + // + // Note that some fields like 'filter_config' and 'vis_config' etc are specific to how the Looker UI + // builds queries and visualizations and are not generally useful for API use. They are not required when + // creating new queries and can usually just be ignored. + // + // + rpc Query(QueryRequest) returns (stream QueryResponse); + + // ### Get the query for a given query slug. + // + // This returns the query for the 'slug' in a query share URL. + // + // The 'slug' is a randomly chosen short string that is used as an alternative to the query's id value + // for use in URLs etc. This method exists as a convenience to help you use the API to 'find' queries that + // have been created using the Looker UI. + // + // You can use the Looker explore page to build a query and then choose the 'Share' option to + // show the share url for the query. Share urls generally look something like 'https://looker.yourcompany/x/vwGSbfc'. + // The trailing 'vwGSbfc' is the share slug. You can pass that string to this api method to get details about the query. + // Those details include the 'id' that you can use to run the query. Or, you can copy the query body + // (perhaps with your own modification) and use that as the basis to make/run new queries. + // + // This will also work with slugs from Looker explore urls like + // 'https://looker.yourcompany/explore/ecommerce/orders?qid=aogBgL6o3cKK1jN3RoZl5s'. In this case + // 'aogBgL6o3cKK1jN3RoZl5s' is the slug. + // + rpc QueryForSlug(QueryForSlugRequest) returns (stream QueryForSlugResponse); + + // ### Create a query. + // + // This allows you to create a new query that you can later run. Looker queries are immutable once created + // and are not deleted. If you create a query that is exactly like an existing query then the existing query + // will be returned and no new query will be created. Whether a new query is created or not, you can use + // the 'id' in the returned query with the 'run' method. + // + // The query parameters are passed as json in the body of the request. + // + // + rpc CreateQuery(CreateQueryRequest) returns (stream CreateQueryResponse); + + // ### Run a saved query. + // + // This runs a previously saved query. You can use this on a query that was generated in the Looker UI + // or one that you have explicitly created using the API. You can also use a query 'id' from a saved 'Look'. + // + // The 'result_format' parameter specifies the desired structure and format of the response. 
+ // + // Supported formats: + // + // | result_format | Description + // | :-----------: | :--- | + // | json | Plain json + // | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + // | csv | Comma separated values with a header + // | txt | Tab separated values with a header + // | html | Simple html + // | md | Simple markdown + // | xlsx | MS Excel spreadsheet + // | sql | Returns the generated SQL rather than running the query + // | png | A PNG image of the visualization of the query + // | jpg | A JPG image of the visualization of the query + // + // + // + rpc RunQuery(RunQueryRequest) returns (stream RunQueryResponse); + + // ### Run the query that is specified inline in the posted body. + // + // This allows running a query as defined in json in the posted body. This combines + // the two actions of posting & running a query into one step. + // + // Here is an example body in json: + // ``` + // { + // "model":"thelook", + // "view":"inventory_items", + // "fields":["category.name","inventory_items.days_in_inventory_tier","products.count"], + // "filters":{"category.name":"socks"}, + // "sorts":["products.count desc 0"], + // "limit":"500", + // "query_timezone":"America/Los_Angeles" + // } + // ``` + // + // When using the Ruby SDK this would be passed as a Ruby hash like: + // ``` + // { + // :model=>"thelook", + // :view=>"inventory_items", + // :fields=> + // ["category.name", + // "inventory_items.days_in_inventory_tier", + // "products.count"], + // :filters=>{:"category.name"=>"socks"}, + // :sorts=>["products.count desc 0"], + // :limit=>"500", + // :query_timezone=>"America/Los_Angeles", + // } + // ``` + // + // This will return the result of running the query in the format specified by the 'result_format' parameter. + // + // Supported formats: + // + // | result_format | Description + // | :-----------: | :--- | + // | json | Plain json + // | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + // | csv | Comma separated values with a header + // | txt | Tab separated values with a header + // | html | Simple html + // | md | Simple markdown + // | xlsx | MS Excel spreadsheet + // | sql | Returns the generated SQL rather than running the query + // | png | A PNG image of the visualization of the query + // | jpg | A JPG image of the visualization of the query + // + // + // + rpc RunInlineQuery(RunInlineQueryRequest) returns (stream RunInlineQueryResponse); + + // ### Run an URL encoded query. + // + // This requires the caller to encode the specifiers for the query into the URL query part using + // Looker-specific syntax as explained below. + // + // Generally, you would want to use one of the methods that takes the parameters as json in the POST body + // for creating and/or running queries. This method exists for cases where one really needs to encode the + // parameters into the URL of a single 'GET' request. This matches the way that the Looker UI formats + // 'explore' URLs etc. + // + // The parameters here are very similar to the json body formatting except that the filter syntax is + // tricky. Unfortunately, this format makes this method not currently callable via the 'Try it out!' button + // in this documentation page. But, this is callable when creating URLs manually or when using the Looker SDK. 
+ // + // Here is an example inline query URL: + // + // ``` + // https://looker.mycompany.com:19999/api/3.0/queries/models/thelook/views/inventory_items/run/json?fields=category.name,inventory_items.days_in_inventory_tier,products.count&f[category.name]=socks&sorts=products.count+desc+0&limit=500&query_timezone=America/Los_Angeles + // ``` + // + // When invoking this endpoint with the Ruby SDK, pass the query parameter parts as a hash. The hash to match the above would look like: + // + // ```ruby + // query_params = + // { + // :fields => "category.name,inventory_items.days_in_inventory_tier,products.count", + // :"f[category.name]" => "socks", + // :sorts => "products.count desc 0", + // :limit => "500", + // :query_timezone => "America/Los_Angeles" + // } + // response = ruby_sdk.run_url_encoded_query('thelook','inventory_items','json', query_params) + // + // ``` + // + // Again, it is generally easier to use the variant of this method that passes the full query in the POST body. + // This method is available for cases where other alternatives won't fit the need. + // + // Supported formats: + // + // | result_format | Description + // | :-----------: | :--- | + // | json | Plain json + // | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + // | csv | Comma separated values with a header + // | txt | Tab separated values with a header + // | html | Simple html + // | md | Simple markdown + // | xlsx | MS Excel spreadsheet + // | sql | Returns the generated SQL rather than running the query + // | png | A PNG image of the visualization of the query + // | jpg | A JPG image of the visualization of the query + // + // + // + rpc RunUrlEncodedQuery(RunUrlEncodedQueryRequest) returns (stream RunUrlEncodedQueryResponse); + + // ### Get Merge Query + // + // Returns a merge query object given its id. + // + rpc MergeQuery(MergeQueryRequest) returns (stream MergeQueryResponse); + + // ### Create Merge Query + // + // Creates a new merge query object. + // + // A merge query takes the results of one or more queries and combines (merges) the results + // according to field mapping definitions. The result is similar to a SQL left outer join. + // + // A merge query can merge results of queries from different SQL databases. + // + // The order that queries are defined in the source_queries array property is significant. The + // first query in the array defines the primary key into which the results of subsequent + // queries will be merged. + // + // Like model/view query objects, merge queries are immutable and have structural identity - if + // you make a request to create a new merge query that is identical to an existing merge query, + // the existing merge query will be returned instead of creating a duplicate. Conversely, any + // change to the contents of a merge query will produce a new object with a new id. + // + rpc CreateMergeQuery(CreateMergeQueryRequest) returns (stream CreateMergeQueryResponse); + + // Get information about all running queries. + // + rpc AllRunningQueries(AllRunningQueriesRequest) returns (stream AllRunningQueriesStreamResponse); + + // Kill a query with a specific query_task_id. + // + rpc KillQuery(KillQueryRequest) returns (stream KillQueryResponse); + + // Get a SQL Runner query. + rpc SqlQuery(SqlQueryRequest) returns (stream SqlQueryResponse); + + // ### Create a SQL Runner Query + // + // Either the `connection_name` or `model_name` parameter MUST be provided. 
+ // + rpc CreateSqlQuery(CreateSqlQueryRequest) returns (stream CreateSqlQueryResponse); + + // Execute a SQL Runner query in a given result_format. + rpc RunSqlQuery(RunSqlQueryRequest) returns (stream RunSqlQueryResponse); + + + + // RenderTask: Manage Render Tasks + + // ### Create a new task to render a look to an image. + // + // Returns a render task object. + // To check the status of a render task, pass the render_task.id to [Get Render Task](#!/RenderTask/get_render_task). + // Once the render task is complete, you can download the resulting document or image using [Get Render Task Results](#!/RenderTask/get_render_task_results). + // + // + rpc CreateLookRenderTask(CreateLookRenderTaskRequest) returns (stream CreateLookRenderTaskResponse); + + // ### Create a new task to render an existing query to an image. + // + // Returns a render task object. + // To check the status of a render task, pass the render_task.id to [Get Render Task](#!/RenderTask/get_render_task). + // Once the render task is complete, you can download the resulting document or image using [Get Render Task Results](#!/RenderTask/get_render_task_results). + // + // + rpc CreateQueryRenderTask(CreateQueryRenderTaskRequest) returns (stream CreateQueryRenderTaskResponse); + + // ### Create a new task to render a dashboard to a document or image. + // + // Returns a render task object. + // To check the status of a render task, pass the render_task.id to [Get Render Task](#!/RenderTask/get_render_task). + // Once the render task is complete, you can download the resulting document or image using [Get Render Task Results](#!/RenderTask/get_render_task_results). + // + // + rpc CreateDashboardRenderTask(CreateDashboardRenderTaskRequest) returns (stream CreateDashboardRenderTaskResponse); + + // ### Get information about a render task. + // + // Returns a render task object. + // To check the status of a render task, pass the render_task.id to [Get Render Task](#!/RenderTask/get_render_task). + // Once the render task is complete, you can download the resulting document or image using [Get Render Task Results](#!/RenderTask/get_render_task_results). + // + // + rpc RenderTask(RenderTaskRequest) returns (stream RenderTaskResponse); + + // ### Get the document or image produced by a completed render task. + // + // Note that the PDF or image result will be a binary blob in the HTTP response, as indicated by the + // Content-Type in the response headers. This may require specialized (or at least different) handling than text + // responses such as JSON. You may need to tell your HTTP client that the response is binary so that it does not + // attempt to parse the binary data as text. + // + // If the render task exists but has not finished rendering the results, the response HTTP status will be + // **202 Accepted**, the response body will be empty, and the response will have a Retry-After header indicating + // that the caller should repeat the request at a later time. + // + // Returns 404 if the render task cannot be found, if the cached result has expired, or if the caller + // does not have permission to view the results. + // + // For detailed information about the status of the render task, use [Render Task](#!/RenderTask/render_task). + // Polling loops waiting for completion of a render task would be better served by polling **render_task(id)** until + // the task status reaches completion (or error) instead of polling **render_task_results(id)** alone. 
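+  //
+  // A minimal polling loop along the lines recommended above, using the blocking stub from this
+  // PR's tests. The id/status field names on the render task messages are assumptions:
+  //
+  // ```java
+  // String taskId = stub.createLookRenderTask(
+  //     CreateLookRenderTaskRequest.newBuilder()
+  //         .setLookId(42)
+  //         .setResultFormat("png")
+  //         .setWidth(800)
+  //         .setHeight(600)
+  //         .build())
+  //     .getResult().getId();
+  //
+  // // Poll the render task itself, not the results endpoint, until it completes.
+  // while (!"success".equals(stub.renderTask(
+  //     RenderTaskRequest.newBuilder().setRenderTaskId(taskId).build())
+  //         .getResult().getStatus())) {
+  //   Thread.sleep(1000);                        // interrupt/error handling omitted
+  // }
+  //
+  // // The finished document or image arrives as binary content.
+  // RenderTaskResultsResponse results = stub.renderTaskResults(
+  //     RenderTaskResultsRequest.newBuilder().setRenderTaskId(taskId).build());
+  // ```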
+ // + rpc RenderTaskResults(RenderTaskResultsRequest) returns (stream RenderTaskResultsResponse); + + + + // Role: Manage Roles + + // ### Search model sets + // Returns all model set records that match the given search criteria. + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + rpc SearchModelSets(SearchModelSetsRequest) returns (stream SearchModelSetsStreamResponse); + + // ### Get information about the model set with a specific id. + // + rpc ModelSet(ModelSetRequest) returns (stream ModelSetResponse); + + // ### Update information about the model set with a specific id. + // + rpc UpdateModelSet(UpdateModelSetRequest) returns (stream UpdateModelSetResponse); + + // ### Delete the model set with a specific id. + // + rpc DeleteModelSet(DeleteModelSetRequest) returns (stream DeleteModelSetResponse); + + // ### Get information about all model sets. + // + rpc AllModelSets(AllModelSetsRequest) returns (stream AllModelSetsStreamResponse); + + // ### Create a model set with the specified information. Model sets are used by Roles. + // + rpc CreateModelSet(CreateModelSetRequest) returns (stream CreateModelSetResponse); + + // ### Get all supported permissions. + // + rpc AllPermissions(AllPermissionsRequest) returns (stream AllPermissionsStreamResponse); + + // ### Search permission sets + // Returns all permission set records that match the given search criteria. + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. 
+ // + // Boolean search params accept only "true" and "false" as values. + // + // + rpc SearchPermissionSets(SearchPermissionSetsRequest) returns (stream SearchPermissionSetsStreamResponse); + + // ### Get information about the permission set with a specific id. + // + rpc PermissionSet(PermissionSetRequest) returns (stream PermissionSetResponse); + + // ### Update information about the permission set with a specific id. + // + rpc UpdatePermissionSet(UpdatePermissionSetRequest) returns (stream UpdatePermissionSetResponse); + + // ### Delete the permission set with a specific id. + // + rpc DeletePermissionSet(DeletePermissionSetRequest) returns (stream DeletePermissionSetResponse); + + // ### Get information about all permission sets. + // + rpc AllPermissionSets(AllPermissionSetsRequest) returns (stream AllPermissionSetsStreamResponse); + + // ### Create a permission set with the specified information. Permission sets are used by Roles. + // + rpc CreatePermissionSet(CreatePermissionSetRequest) returns (stream CreatePermissionSetResponse); + + // ### Get information about all roles. + // + rpc AllRoles(AllRolesRequest) returns (stream AllRolesStreamResponse); + + // ### Create a role with the specified information. + // + rpc CreateRole(CreateRoleRequest) returns (stream CreateRoleResponse); + + // ### Search roles + // + // Returns all role records that match the given search criteria. + // + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + rpc SearchRoles(SearchRolesRequest) returns (stream SearchRolesStreamResponse); + + // ### Get information about the role with a specific id. + // + rpc Role(RoleRequest) returns (stream RoleResponse); + + // ### Update information about the role with a specific id. + // + rpc UpdateRole(UpdateRoleRequest) returns (stream UpdateRoleResponse); + + // ### Delete the role with a specific id. + // + rpc DeleteRole(DeleteRoleRequest) returns (stream DeleteRoleResponse); + + // ### Get information about all the groups with the role that has a specific id. + // + rpc RoleGroups(RoleGroupsRequest) returns (stream RoleGroupsStreamResponse); + + // ### Set all groups for a role, removing all existing group associations from that role. + // + rpc SetRoleGroups(SetRoleGroupsRequest) returns (stream SetRoleGroupsStreamResponse); + + // ### Get information about all the users with the role that has a specific id. 
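+  //
+  // Because the rpcs in this service stream their results, list responses such as this one can
+  // be consumed through a StreamObserver on the async stub
+  // (lookerGrpcClient.getLookerStreamingServiceStub(), as in StreamingUserTests). A rough
+  // sketch; the setRoleId field name is an assumption:
+  //
+  // ```java
+  // CountDownLatch done = new CountDownLatch(1);
+  // List<RoleUsersStreamResponse> users = new ArrayList<>();
+  // streamingStub.roleUsers(
+  //     RoleUsersRequest.newBuilder().setRoleId(2).build(),
+  //     new StreamObserver<RoleUsersStreamResponse>() {
+  //       public void onNext(RoleUsersStreamResponse value) { users.add(value); }
+  //       public void onError(Throwable t) { done.countDown(); }
+  //       public void onCompleted() { done.countDown(); }
+  //     });
+  // done.await(30, TimeUnit.SECONDS);            // InterruptedException handling omitted
+  // ```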
+ // + rpc RoleUsers(RoleUsersRequest) returns (stream RoleUsersStreamResponse); + + // ### Set all the users of the role with a specific id. + // + rpc SetRoleUsers(SetRoleUsersRequest) returns (stream SetRoleUsersStreamResponse); + + + + // ScheduledPlan: Manage Scheduled Plans + + // ### Get Scheduled Plans for a Space + // + // Returns scheduled plans owned by the caller for a given space id. + // + rpc ScheduledPlansForSpace(ScheduledPlansForSpaceRequest) returns (stream ScheduledPlansForSpaceStreamResponse); + + // ### Get Information About a Scheduled Plan + // + // Admins can fetch information about other users' Scheduled Plans. + // + rpc ScheduledPlan(ScheduledPlanRequest) returns (stream ScheduledPlanResponse); + + // ### Update a Scheduled Plan + // + // Admins can update other users' Scheduled Plans. + // + // Note: Any scheduled plan destinations specified in an update will **replace** all scheduled plan destinations + // currently defined for the scheduled plan. + // + // For Example: If a scheduled plan has destinations A, B, and C, and you call update on this scheduled plan + // specifying only B in the destinations, then destinations A and C will be deleted by the update. + // + // Updating a scheduled plan to assign null or an empty array to the scheduled_plan_destinations property is an error, as a scheduled plan must always have at least one destination. + // + // If you omit the scheduled_plan_destinations property from the object passed to update, then the destinations + // defined on the original scheduled plan will remain unchanged. + // + // #### Email Permissions: + // + // For details about permissions required to schedule delivery to email and the safeguards + // Looker offers to protect against sending to unauthorized email destinations, see [Email Domain Whitelist for Scheduled Looks](https://docs.looker.com/r/api/embed-permissions). + // + // + // #### Scheduled Plan Destination Formats + // + // Scheduled plan destinations must specify the data format to produce and send to the destination. + // + // Formats: + // + // | format | Description + // | :-----------: | :--- | + // | json | A JSON object containing a `data` property which contains an array of JSON objects, one per row. No metadata. + // | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + // | inline_json | Same as the JSON format, except that the `data` property is a string containing JSON-escaped row data. Additional properties describe the data operation. This format is primarily used to send data to web hooks so that the web hook doesn't have to re-encode the JSON row data in order to pass it on to its ultimate destination. + // | csv | Comma separated values with a header + // | txt | Tab separated values with a header + // | html | Simple html + // | xlsx | MS Excel spreadsheet + // | wysiwyg_pdf | Dashboard rendered in a tiled layout to produce a PDF document + // | assembled_pdf | Dashboard rendered in a single column layout to produce a PDF document + // | wysiwyg_png | Dashboard rendered in a tiled layout to produce a PNG image + // || + // + // Valid formats vary by destination type and source object. `wysiwyg_pdf` is only valid for dashboards, for example. + // + // + // + rpc UpdateScheduledPlan(UpdateScheduledPlanRequest) returns (stream UpdateScheduledPlanResponse); + + // ### Delete a Scheduled Plan + // + // Normal users can only delete their own scheduled plans. + // Admins can delete other users' scheduled plans. 
+ // This delete cannot be undone. + // + rpc DeleteScheduledPlan(DeleteScheduledPlanRequest) returns (stream DeleteScheduledPlanResponse); + + // ### List All Scheduled Plans + // + // Returns all scheduled plans which belong to the caller or given user. + // + // If no user_id is provided, this function returns the scheduled plans owned by the caller. + // + // + // To list all schedules for all users, pass `all_users=true`. + // + // + // The caller must have `see_schedules` permission to see other users' scheduled plans. + // + // + // + rpc AllScheduledPlans(AllScheduledPlansRequest) returns (stream AllScheduledPlansStreamResponse); + + // ### Create a Scheduled Plan + // + // Create a scheduled plan to render a Look or Dashboard on a recurring schedule. + // + // To create a scheduled plan, you MUST provide values for the following fields: + // `name` + // and + // `look_id`, `dashboard_id`, `lookml_dashboard_id`, or `query_id` + // and + // `cron_tab` or `datagroup` + // and + // at least one scheduled_plan_destination + // + // A scheduled plan MUST have at least one scheduled_plan_destination defined. + // + // When `look_id` is set, `require_no_results`, `require_results`, and `require_change` are all required. + // + // If `create_scheduled_plan` fails with a 422 error, be sure to look at the error messages in the response which will explain exactly what fields are missing or values that are incompatible. + // + // The queries that provide the data for the look or dashboard are run in the context of user account that owns the scheduled plan. + // + // When `run_as_recipient` is `false` or not specified, the queries that provide the data for the + // look or dashboard are run in the context of user account that owns the scheduled plan. + // + // When `run_as_recipient` is `true` and all the email recipients are Looker user accounts, the + // queries are run in the context of each recipient, so different recipients may see different + // data from the same scheduled render of a look or dashboard. For more details, see [Run As Recipient](https://looker.com/docs/r/admin/run-as-recipient). + // + // Admins can create and modify scheduled plans on behalf of other users by specifying a user id. + // Non-admin users may not create or modify scheduled plans by or for other users. + // + // #### Email Permissions: + // + // For details about permissions required to schedule delivery to email and the safeguards + // Looker offers to protect against sending to unauthorized email destinations, see [Email Domain Whitelist for Scheduled Looks](https://docs.looker.com/r/api/embed-permissions). + // + // + // #### Scheduled Plan Destination Formats + // + // Scheduled plan destinations must specify the data format to produce and send to the destination. + // + // Formats: + // + // | format | Description + // | :-----------: | :--- | + // | json | A JSON object containing a `data` property which contains an array of JSON objects, one per row. No metadata. + // | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + // | inline_json | Same as the JSON format, except that the `data` property is a string containing JSON-escaped row data. Additional properties describe the data operation. This format is primarily used to send data to web hooks so that the web hook doesn't have to re-encode the JSON row data in order to pass it on to its ultimate destination. 
+ // | csv | Comma separated values with a header + // | txt | Tab separated values with a header + // | html | Simple html + // | xlsx | MS Excel spreadsheet + // | wysiwyg_pdf | Dashboard rendered in a tiled layout to produce a PDF document + // | assembled_pdf | Dashboard rendered in a single column layout to produce a PDF document + // | wysiwyg_png | Dashboard rendered in a tiled layout to produce a PNG image + // || + // + // Valid formats vary by destination type and source object. `wysiwyg_pdf` is only valid for dashboards, for example. + // + // + // + rpc CreateScheduledPlan(CreateScheduledPlanRequest) returns (stream CreateScheduledPlanResponse); + + // ### Run a Scheduled Plan Immediately + // + // Create a scheduled plan that runs only once, and immediately. + // + // This can be useful for testing a Scheduled Plan before committing to a production schedule. + // + // Admins can create scheduled plans on behalf of other users by specifying a user id. + // + // This API is rate limited to prevent it from being used for relay spam or DoS attacks + // + // #### Email Permissions: + // + // For details about permissions required to schedule delivery to email and the safeguards + // Looker offers to protect against sending to unauthorized email destinations, see [Email Domain Whitelist for Scheduled Looks](https://docs.looker.com/r/api/embed-permissions). + // + // + // #### Scheduled Plan Destination Formats + // + // Scheduled plan destinations must specify the data format to produce and send to the destination. + // + // Formats: + // + // | format | Description + // | :-----------: | :--- | + // | json | A JSON object containing a `data` property which contains an array of JSON objects, one per row. No metadata. + // | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + // | inline_json | Same as the JSON format, except that the `data` property is a string containing JSON-escaped row data. Additional properties describe the data operation. This format is primarily used to send data to web hooks so that the web hook doesn't have to re-encode the JSON row data in order to pass it on to its ultimate destination. + // | csv | Comma separated values with a header + // | txt | Tab separated values with a header + // | html | Simple html + // | xlsx | MS Excel spreadsheet + // | wysiwyg_pdf | Dashboard rendered in a tiled layout to produce a PDF document + // | assembled_pdf | Dashboard rendered in a single column layout to produce a PDF document + // | wysiwyg_png | Dashboard rendered in a tiled layout to produce a PNG image + // || + // + // Valid formats vary by destination type and source object. `wysiwyg_pdf` is only valid for dashboards, for example. + // + // + // + rpc ScheduledPlanRunOnce(ScheduledPlanRunOnceRequest) returns (stream ScheduledPlanRunOnceResponse); + + // ### Get Scheduled Plans for a Look + // + // Returns all scheduled plans for a look which belong to the caller or given user. + // + // If no user_id is provided, this function returns the scheduled plans owned by the caller. + // + // + // To list all schedules for all users, pass `all_users=true`. + // + // + // The caller must have `see_schedules` permission to see other users' scheduled plans. 
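+  //
+  // Tying together the creation rules documented for CreateScheduledPlan above, a rough Java
+  // sketch using the blocking stub from this PR's tests. The body and destination message names
+  // (WriteScheduledPlan, ScheduledPlanDestination) and their fields are assumptions about the
+  // generated code:
+  //
+  // ```java
+  // CreateScheduledPlanResponse plan = stub.createScheduledPlan(
+  //     CreateScheduledPlanRequest.newBuilder()
+  //         .setBody(WriteScheduledPlan.newBuilder()
+  //             .setName("Nightly orders look")          // required
+  //             .setLookId(42)                           // one of look/dashboard/lookml_dashboard/query id
+  //             .setCrontab("0 6 * * *")                 // or a datagroup
+  //             .setRequireResults(false)                // all three required when look_id is set
+  //             .setRequireNoResults(false)
+  //             .setRequireChange(false)
+  //             .addScheduledPlanDestination(            // at least one destination
+  //                 ScheduledPlanDestination.newBuilder()
+  //                     .setType("email")
+  //                     .setAddress("reports@example.com")
+  //                     .setFormat("csv")                // valid formats vary by destination and source
+  //                     .build())
+  //             .build())
+  //         .build());
+  // ```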
+ // + // + // + rpc ScheduledPlansForLook(ScheduledPlansForLookRequest) returns (stream ScheduledPlansForLookStreamResponse); + + // ### Get Scheduled Plans for a Dashboard + // + // Returns all scheduled plans for a dashboard which belong to the caller or given user. + // + // If no user_id is provided, this function returns the scheduled plans owned by the caller. + // + // + // To list all schedules for all users, pass `all_users=true`. + // + // + // The caller must have `see_schedules` permission to see other users' scheduled plans. + // + // + // + rpc ScheduledPlansForDashboard(ScheduledPlansForDashboardRequest) returns (stream ScheduledPlansForDashboardStreamResponse); + + // ### Get Scheduled Plans for a LookML Dashboard + // + // Returns all scheduled plans for a LookML Dashboard which belong to the caller or given user. + // + // If no user_id is provided, this function returns the scheduled plans owned by the caller. + // + // + // To list all schedules for all users, pass `all_users=true`. + // + // + // The caller must have `see_schedules` permission to see other users' scheduled plans. + // + // + // + rpc ScheduledPlansForLookmlDashboard(ScheduledPlansForLookmlDashboardRequest) returns (stream ScheduledPlansForLookmlDashboardStreamResponse); + + // ### Run a Scheduled Plan By Id Immediately + // This function creates a run-once schedule plan based on an existing scheduled plan, + // applies modifications (if any) to the new scheduled plan, and runs the new schedule plan immediately. + // This can be useful for testing modifications to an existing scheduled plan before committing to a production schedule. + // + // This function internally performs the following operations: + // + // 1. Copies the properties of the existing scheduled plan into a new scheduled plan + // 2. Copies any properties passed in the JSON body of this request into the new scheduled plan (replacing the original values) + // 3. Creates the new scheduled plan + // 4. Runs the new scheduled plan + // + // The original scheduled plan is not modified by this operation. + // Admins can create, modify, and run scheduled plans on behalf of other users by specifying a user id. + // Non-admins can only create, modify, and run their own scheduled plans. + // + // #### Email Permissions: + // + // For details about permissions required to schedule delivery to email and the safeguards + // Looker offers to protect against sending to unauthorized email destinations, see [Email Domain Whitelist for Scheduled Looks](https://docs.looker.com/r/api/embed-permissions). + // + // + // #### Scheduled Plan Destination Formats + // + // Scheduled plan destinations must specify the data format to produce and send to the destination. + // + // Formats: + // + // | format | Description + // | :-----------: | :--- | + // | json | A JSON object containing a `data` property which contains an array of JSON objects, one per row. No metadata. + // | json_detail | Row data plus metadata describing the fields, pivots, table calcs, and other aspects of the query + // | inline_json | Same as the JSON format, except that the `data` property is a string containing JSON-escaped row data. Additional properties describe the data operation. This format is primarily used to send data to web hooks so that the web hook doesn't have to re-encode the JSON row data in order to pass it on to its ultimate destination. 
+ // | csv | Comma separated values with a header + // | txt | Tab separated values with a header + // | html | Simple html + // | xlsx | MS Excel spreadsheet + // | wysiwyg_pdf | Dashboard rendered in a tiled layout to produce a PDF document + // | assembled_pdf | Dashboard rendered in a single column layout to produce a PDF document + // | wysiwyg_png | Dashboard rendered in a tiled layout to produce a PNG image + // || + // + // Valid formats vary by destination type and source object. `wysiwyg_pdf` is only valid for dashboards, for example. + // + // + // + // This API is rate limited to prevent it from being used for relay spam or DoS attacks + // + // + rpc ScheduledPlanRunOnceById(ScheduledPlanRunOnceByIdRequest) returns (stream ScheduledPlanRunOnceByIdResponse); + + + + // Session: Session Information + + // ### Get API Session + // + // Returns information about the current API session, such as which workspace is selected for the session. + // + rpc Session(SessionRequest) returns (stream SessionResponse); + + // ### Update API Session + // + // #### API Session Workspace + // + // You can use this endpoint to change the active workspace for the current API session. + // + // Only one workspace can be active in a session. The active workspace can be changed + // any number of times in a session. + // + // The default workspace for API sessions is the "production" workspace. + // + // All Looker APIs that use projects or lookml models (such as running queries) will + // use the version of project and model files defined by this workspace for the lifetime of the + // current API session or until the session workspace is changed again. + // + // An API session has the same lifetime as the access_token used to authenticate API requests. Each successful + // API login generates a new access_token and a new API session. + // + // If your Looker API client application needs to work in a dev workspace across multiple + // API sessions, be sure to select the dev workspace after each login. + // + rpc UpdateSession(UpdateSessionRequest) returns (stream UpdateSessionResponse); + + + + // Theme: Manage Themes + + // ### Get an array of all existing themes + // + // Get a **single theme** by id with [Theme](#!/Theme/theme) + // + // This method returns an array of all existing themes. The active time for the theme is not considered. + // + // **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + // + // + rpc AllThemes(AllThemesRequest) returns (stream AllThemesStreamResponse); + + // ### Create a theme + // + // Creates a new theme object, returning the theme details, including the created id. + // + // If `settings` are not specified, the default theme settings will be copied into the new theme. + // + // The theme `name` can only contain alphanumeric characters or underscores. Theme names should not contain any confidential information, such as customer names. + // + // **Update** an existing theme with [Update Theme](#!/Theme/update_theme) + // + // **Permanently delete** an existing theme with [Delete Theme](#!/Theme/delete_theme) + // + // For more information, see [Creating and Applying Themes](https://looker.com/docs/r/admin/themes). + // + // **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. 
Please contact your Account Manager or help.looker.com to update your license for this feature. + // + // + rpc CreateTheme(CreateThemeRequest) returns (stream CreateThemeResponse); + + // ### Search all themes for matching criteria. + // + // Returns an **array of theme objects** that match the specified search criteria. + // + // | Search Parameters | Description + // | :-------------------: | :------ | + // | `begin_at` only | Find themes active at or after `begin_at` + // | `end_at` only | Find themes active at or before `end_at` + // | both set | Find themes with an active inclusive period between `begin_at` and `end_at` + // + // Note: Range matching requires boolean AND logic. + // When using `begin_at` and `end_at` together, do not use `filter_or`=TRUE + // + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + // Get a **single theme** by id with [Theme](#!/Theme/theme) + // + // **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + // + // + rpc SearchThemes(SearchThemesRequest) returns (stream SearchThemesStreamResponse); + + // ### Get the default theme + // + // Returns the active theme object set as the default. + // + // The **default** theme name can be set in the UI on the Admin|Theme UI page + // + // The optional `ts` parameter can specify a different timestamp than "now." If specified, it returns the default theme at the time indicated. + // + rpc DefaultTheme(DefaultThemeRequest) returns (stream DefaultThemeResponse); + + // ### Set the global default theme by theme name + // + // Only Admin users can call this function. + // + // Only an active theme with no expiration (`end_at` not set) can be assigned as the default theme. As long as a theme has an active record with no expiration, it can be set as the default. + // + // [Create Theme](#!/Theme/create) has detailed information on rules for default and active themes + // + // Returns the new specified default theme object. + // + // **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. 
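+  //
+  // A short sketch of promoting a theme to the default by name with the blocking stub used in
+  // this PR's tests (the setName field and the shape of the returned Theme are assumptions):
+  //
+  // ```java
+  // SetDefaultThemeResponse resp = stub.setDefaultTheme(
+  //     SetDefaultThemeRequest.newBuilder()
+  //         .setName("corporate_dark")   // must name an active theme with no end_at
+  //         .build());
+  // Theme newDefault = resp.getResult();
+  // ```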
+ // + // + rpc SetDefaultTheme(SetDefaultThemeRequest) returns (stream SetDefaultThemeResponse); + + // ### Get active themes + // + // Returns an array of active themes. + // + // If the `name` parameter is specified, it will return an array with one theme if it's active and found. + // + // The optional `ts` parameter can specify a different timestamp than "now." + // + // **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + // + // + // + rpc ActiveThemes(ActiveThemesRequest) returns (stream ActiveThemesStreamResponse); + + // ### Get the named theme if it's active. Otherwise, return the default theme + // + // The optional `ts` parameter can specify a different timestamp than "now." + // Note: API users with `show` ability can call this function + // + // **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + // + // + rpc ThemeOrDefault(ThemeOrDefaultRequest) returns (stream ThemeOrDefaultResponse); + + // ### Validate a theme with the specified information + // + // Validates all values set for the theme, returning any errors encountered, or 200 OK if valid + // + // See [Create Theme](#!/Theme/create_theme) for constraints + // + // **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + // + // + rpc ValidateTheme(ValidateThemeRequest) returns (stream ValidateThemeResponse); + + // ### Get a theme by ID + // + // Use this to retrieve a specific theme, whether or not it's currently active. + // + // **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + // + // + rpc Theme(ThemeRequest) returns (stream ThemeResponse); + + // ### Update the theme by id. + // + // **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + // + // + rpc UpdateTheme(UpdateThemeRequest) returns (stream UpdateThemeResponse); + + // ### Delete a specific theme by id + // + // This operation permanently deletes the identified theme from the database. + // + // Because multiple themes can have the same name (with different activation time spans) themes can only be deleted by ID. + // + // All IDs associated with a theme name can be retrieved by searching for the theme name with [Theme Search](#!/Theme/search). + // + // **Note**: Custom themes needs to be enabled by Looker. Unless custom themes are enabled, only the automatically generated default theme can be used. Please contact your Account Manager or help.looker.com to update your license for this feature. + // + // + rpc DeleteTheme(DeleteThemeRequest) returns (stream DeleteThemeResponse); + + + + // User: Manage Users + + // ### Get information about the current user; i.e. 
the user account currently calling the API. + // + rpc Me(MeRequest) returns (stream MeResponse); + + // ### Get information about all users. + // + rpc AllUsers(AllUsersRequest) returns (stream AllUsersStreamResponse); + + // ### Create a user with the specified information. + // + rpc CreateUser(CreateUserRequest) returns (stream CreateUserResponse); + + // ### Search users + // + // Returns all* user records that match the given search criteria. + // + // If multiple search params are given and `filter_or` is FALSE or not specified, + // search params are combined in a logical AND operation. + // Only rows that match *all* search param criteria will be returned. + // + // If `filter_or` is TRUE, multiple search params are combined in a logical OR operation. + // Results will include rows that match **any** of the search criteria. + // + // String search params use case-insensitive matching. + // String search params can contain `%` and '_' as SQL LIKE pattern match wildcard expressions. + // example="dan%" will match "danger" and "Danzig" but not "David" + // example="D_m%" will match "Damage" and "dump" + // + // Integer search params can accept a single value or a comma separated list of values. The multiple + // values will be combined under a logical OR operation - results will match at least one of + // the given values. + // + // Most search params can accept "IS NULL" and "NOT NULL" as special expressions to match + // or exclude (respectively) rows where the column is null. + // + // Boolean search params accept only "true" and "false" as values. + // + // + // (*) Results are always filtered to the level of information the caller is permitted to view. + // Looker admins can see all user details; normal users in an open system can see + // names of other users but no details; normal users in a closed system can only see + // names of other users who are members of the same group as the user. + // + // + rpc SearchUsers(SearchUsersRequest) returns (stream SearchUsersStreamResponse); + + // ### Search for user accounts by name + // + // Returns all user accounts where `first_name` OR `last_name` OR `email` field values match a pattern. + // The pattern can contain `%` and `_` wildcards as in SQL LIKE expressions. + // + // Any additional search params will be combined into a logical AND expression. + // + rpc SearchUsersNames(SearchUsersNamesRequest) returns (stream SearchUsersNamesStreamResponse); + + // ### Get information about the user with a specific id. + // + // If the caller is an admin or the caller is the user being specified, then full user information will + // be returned. Otherwise, a minimal 'public' variant of the user information will be returned. This contains + // The user name and avatar url, but no sensitive information. + // + rpc User(UserRequest) returns (stream UserResponse); + + // ### Update information about the user with a specific id. + // + rpc UpdateUser(UpdateUserRequest) returns (stream UpdateUserResponse); + + // ### Delete the user with a specific id. + // + // **DANGER** this will delete the user and all looks and other information owned by the user. + // + rpc DeleteUser(DeleteUserRequest) returns (stream DeleteUserResponse); + + // ### Get information about the user with a credential of given type with specific id. + // + // This is used to do things like find users by their embed external_user_id. Or, find the user with + // a given api3 client_id, etc. The 'credential_type' matchs the 'type' name of the various credential + // types. 
It must be one of the values listed in the table below. The 'credential_id' is your unique Id + // for the user and is specific to each type of credential. + // + // An example using the Ruby sdk might look like: + // + // `sdk.user_for_credential('embed', 'customer-4959425')` + // + // This table shows the supported 'Credential Type' strings. The right column is for reference; it shows + // which field in the given credential type is actually searched when finding a user with the supplied + // 'credential_id'. + // + // | Credential Types | Id Field Matched | + // | ---------------- | ---------------- | + // | email | email | + // | google | google_user_id | + // | saml | saml_user_id | + // | oidc | oidc_user_id | + // | ldap | ldap_id | + // | api | token | + // | api3 | client_id | + // | embed | external_user_id | + // | looker_openid | email | + // + // NOTE: The 'api' credential type was only used with the legacy Looker query API and is no longer supported. The credential type for API you are currently looking at is 'api3'. + // + // + rpc UserForCredential(UserForCredentialRequest) returns (stream UserForCredentialResponse); + + // ### Email/password login information for the specified user. + rpc UserCredentialsEmail(UserCredentialsEmailRequest) returns (stream UserCredentialsEmailResponse); + + // ### Email/password login information for the specified user. + rpc CreateUserCredentialsEmail(CreateUserCredentialsEmailRequest) returns (stream CreateUserCredentialsEmailResponse); + + // ### Email/password login information for the specified user. + rpc UpdateUserCredentialsEmail(UpdateUserCredentialsEmailRequest) returns (stream UpdateUserCredentialsEmailResponse); + + // ### Email/password login information for the specified user. + rpc DeleteUserCredentialsEmail(DeleteUserCredentialsEmailRequest) returns (stream DeleteUserCredentialsEmailResponse); + + // ### Two-factor login information for the specified user. + rpc UserCredentialsTotp(UserCredentialsTotpRequest) returns (stream UserCredentialsTotpResponse); + + // ### Two-factor login information for the specified user. + rpc CreateUserCredentialsTotp(CreateUserCredentialsTotpRequest) returns (stream CreateUserCredentialsTotpResponse); + + // ### Two-factor login information for the specified user. + rpc DeleteUserCredentialsTotp(DeleteUserCredentialsTotpRequest) returns (stream DeleteUserCredentialsTotpResponse); + + // ### LDAP login information for the specified user. + rpc UserCredentialsLdap(UserCredentialsLdapRequest) returns (stream UserCredentialsLdapResponse); + + // ### LDAP login information for the specified user. + rpc DeleteUserCredentialsLdap(DeleteUserCredentialsLdapRequest) returns (stream DeleteUserCredentialsLdapResponse); + + // ### Google authentication login information for the specified user. + rpc UserCredentialsGoogle(UserCredentialsGoogleRequest) returns (stream UserCredentialsGoogleResponse); + + // ### Google authentication login information for the specified user. + rpc DeleteUserCredentialsGoogle(DeleteUserCredentialsGoogleRequest) returns (stream DeleteUserCredentialsGoogleResponse); + + // ### Saml authentication login information for the specified user. + rpc UserCredentialsSaml(UserCredentialsSamlRequest) returns (stream UserCredentialsSamlResponse); + + // ### Saml authentication login information for the specified user. 
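+  //
+  // The Ruby user_for_credential example above maps to the Java blocking stub roughly as
+  // follows (the request field names are assumptions about the generated messages):
+  //
+  // ```java
+  // UserForCredentialResponse resp = stub.userForCredential(
+  //     UserForCredentialRequest.newBuilder()
+  //         .setCredentialType("embed")
+  //         .setCredentialId("customer-4959425")
+  //         .build());
+  // User found = resp.getResult();
+  // ```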
+ rpc DeleteUserCredentialsSaml(DeleteUserCredentialsSamlRequest) returns (stream DeleteUserCredentialsSamlResponse); + + // ### OpenID Connect (OIDC) authentication login information for the specified user. + rpc UserCredentialsOidc(UserCredentialsOidcRequest) returns (stream UserCredentialsOidcResponse); + + // ### OpenID Connect (OIDC) authentication login information for the specified user. + rpc DeleteUserCredentialsOidc(DeleteUserCredentialsOidcRequest) returns (stream DeleteUserCredentialsOidcResponse); + + // ### API 3 login information for the specified user. This is for the newer API keys that can be added for any user. + rpc UserCredentialsApi3(UserCredentialsApi3Request) returns (stream UserCredentialsApi3Response); + + // ### API 3 login information for the specified user. This is for the newer API keys that can be added for any user. + rpc DeleteUserCredentialsApi3(DeleteUserCredentialsApi3Request) returns (stream DeleteUserCredentialsApi3Response); + + // ### API 3 login information for the specified user. This is for the newer API keys that can be added for any user. + rpc AllUserCredentialsApi3s(AllUserCredentialsApi3sRequest) returns (stream AllUserCredentialsApi3sStreamResponse); + + // ### API 3 login information for the specified user. This is for the newer API keys that can be added for any user. + rpc CreateUserCredentialsApi3(CreateUserCredentialsApi3Request) returns (stream CreateUserCredentialsApi3Response); + + // ### Embed login information for the specified user. + rpc UserCredentialsEmbed(UserCredentialsEmbedRequest) returns (stream UserCredentialsEmbedResponse); + + // ### Embed login information for the specified user. + rpc DeleteUserCredentialsEmbed(DeleteUserCredentialsEmbedRequest) returns (stream DeleteUserCredentialsEmbedResponse); + + // ### Embed login information for the specified user. + rpc AllUserCredentialsEmbeds(AllUserCredentialsEmbedsRequest) returns (stream AllUserCredentialsEmbedsStreamResponse); + + // ### Looker Openid login information for the specified user. Used by Looker Analysts. + rpc UserCredentialsLookerOpenid(UserCredentialsLookerOpenidRequest) returns (stream UserCredentialsLookerOpenidResponse); + + // ### Looker Openid login information for the specified user. Used by Looker Analysts. + rpc DeleteUserCredentialsLookerOpenid(DeleteUserCredentialsLookerOpenidRequest) returns (stream DeleteUserCredentialsLookerOpenidResponse); + + // ### Web login session for the specified user. + rpc UserSession(UserSessionRequest) returns (stream UserSessionResponse); + + // ### Web login session for the specified user. + rpc DeleteUserSession(DeleteUserSessionRequest) returns (stream DeleteUserSessionResponse); + + // ### Web login session for the specified user. + rpc AllUserSessions(AllUserSessionsRequest) returns (stream AllUserSessionsStreamResponse); + + // ### Create a password reset token. + // This will create a cryptographically secure random password reset token for the user. + // If the user already has a password reset token then this invalidates the old token and creates a new one. + // The token is expressed as the 'password_reset_url' of the user's email/password credential object. + // This takes an optional 'expires' param to indicate if the new token should be an expiring token. + // Tokens that expire are typically used for self-service password resets for existing users. + // Invitation emails for new users typically are not set to expire. + // The expire period is always 60 minutes when expires is enabled. 
+ // This method can be called with an empty body. + // + rpc CreateUserCredentialsEmailPasswordReset(CreateUserCredentialsEmailPasswordResetRequest) returns (stream CreateUserCredentialsEmailPasswordResetResponse); + + // ### Get information about roles of a given user + // + rpc UserRoles(UserRolesRequest) returns (stream UserRolesStreamResponse); + + // ### Set roles of the user with a specific id. + // + rpc SetUserRoles(SetUserRolesRequest) returns (stream SetUserRolesStreamResponse); + + // ### Get user attribute values for a given user. + // + // Returns the values of specified user attributes (or all user attributes) for a certain user. + // + // A value for each user attribute is searched for in the following locations, in this order: + // + // 1. in the user's account information + // 1. in groups that the user is a member of + // 1. the default value of the user attribute + // + // If more than one group has a value defined for a user attribute, the group with the lowest rank wins. + // + // The response will only include user attributes for which values were found. Use `include_unset=true` to include + // empty records for user attributes with no value. + // + // The value of all hidden user attributes will be blank. + // + rpc UserAttributeUserValues(UserAttributeUserValuesRequest) returns (stream UserAttributeUserValuesStreamResponse); + + // ### Store a custom value for a user attribute in a user's account settings. + // + // Per-user user attribute values take precedence over group or default values. + // + rpc SetUserAttributeUserValue(SetUserAttributeUserValueRequest) returns (stream SetUserAttributeUserValueResponse); + + // ### Delete a user attribute value from a user's account settings. + // + // After the user attribute value is deleted from the user's account settings, subsequent requests + // for the user attribute value for this user will draw from the user's groups or the default + // value of the user attribute. See [Get User Attribute Values](#!/User/user_attribute_user_values) for more + // information about how user attribute values are resolved. + // + rpc DeleteUserAttributeUserValue(DeleteUserAttributeUserValueRequest) returns (stream DeleteUserAttributeUserValueResponse); + + // ### Send a password reset token. + // This will send a password reset email to the user. If a password reset token does not already exist + // for this user, it will create one and then send it. + // If the user has not yet set up their account, it will send a setup email to the user. + // The URL sent in the email is expressed as the 'password_reset_url' of the user's email/password credential object. + // Password reset URLs will expire in 60 minutes. + // This method can be called with an empty body. + // + rpc SendUserCredentialsEmailPasswordReset(SendUserCredentialsEmailPasswordResetRequest) returns (stream SendUserCredentialsEmailPasswordResetResponse); + + + + // UserAttribute: Manage User Attributes + + // ### Get information about all user attributes. + // + rpc AllUserAttributes(AllUserAttributesRequest) returns (stream AllUserAttributesStreamResponse); + + // ### Create a new user attribute + // + // Permission information for a user attribute is conveyed through the `can` and `user_can_edit` fields. + // The `user_can_edit` field indicates whether an attribute is user-editable _anywhere_ in the application. 
+ // The `can` field gives more granular access information, with the `set_value` child field indicating whether + // an attribute's value can be set by [Setting the User Attribute User Value](#!/User/set_user_attribute_user_value). + // + // Note: `name` and `label` fields must be unique across all user attributes in the Looker instance. + // Attempting to create a new user attribute with a name or label that duplicates an existing + // user attribute will fail with a 422 error. + // + rpc CreateUserAttribute(CreateUserAttributeRequest) returns (stream CreateUserAttributeResponse); + + // ### Get information about a user attribute. + // + rpc UserAttribute(UserAttributeRequest) returns (stream UserAttributeResponse); + + // ### Update a user attribute definition. + // + rpc UpdateUserAttribute(UpdateUserAttributeRequest) returns (stream UpdateUserAttributeResponse); + + // ### Delete a user attribute (admin only). + // + rpc DeleteUserAttribute(DeleteUserAttributeRequest) returns (stream DeleteUserAttributeResponse); + + // ### Returns all values of a user attribute defined by user groups, in precedence order. + // + // A user may be a member of multiple groups which define different values for a given user attribute. + // The order of group-values in the response determines precedence for selecting which group-value applies + // to a given user. For more information, see [Set User Attribute Group Values](#!/UserAttribute/set_user_attribute_group_values). + // + // Results will only include groups that the caller's user account has permission to see. + // + rpc AllUserAttributeGroupValues(AllUserAttributeGroupValuesRequest) returns (stream AllUserAttributeGroupValuesStreamResponse); + + // ### Define values for a user attribute across a set of groups, in priority order. + // + // This function defines all values for a user attribute defined by user groups. This is a global setting, potentially affecting + // all users in the system. This function replaces any existing group value definitions for the indicated user attribute. + // + // The value of a user attribute for a given user is determined by searching the following locations, in this order: + // + // 1. the user's account settings + // 2. the groups that the user is a member of + // 3. the default value of the user attribute, if any + // + // The user may be a member of multiple groups which define different values for that user attribute. The order of items in the group_values parameter + // determines which group takes priority for that user. Lowest array index wins. + // + // An alternate method to indicate the selection precedence of group-values is to assign numbers to the 'rank' property of each + // group-value object in the array. Lowest 'rank' value wins. If you use this technique, you must assign a + // rank value to every group-value object in the array. + // + // To set a user attribute value for a single user, see [Set User Attribute User Value](#!/User/set_user_attribute_user_value). + // To set a user attribute value for all members of a group, see [Set User Attribute Group Value](#!/Group/update_user_attribute_group_value). + // + rpc SetUserAttributeGroupValues(SetUserAttributeGroupValuesRequest) returns (stream SetUserAttributeGroupValuesStreamResponse); + + + + // Workspace: Manage Workspaces + + // ### Get All Workspaces + // + // Returns all workspaces available to the calling user. 
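+  //
+  // Since the list rpcs here are declared as server streams, a blocking stub (if one is
+  // generated for this service) yields an iterator of stream responses. A minimal sketch; the
+  // getResult payload and its getId accessor are assumptions:
+  //
+  // ```java
+  // Iterator<AllWorkspacesStreamResponse> workspaces =
+  //     blockingStub.allWorkspaces(AllWorkspacesRequest.newBuilder().build());
+  // while (workspaces.hasNext()) {
+  //   System.out.println(workspaces.next().getResult().getId());   // e.g. "production", "dev"
+  // }
+  // ```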
+ // + rpc AllWorkspaces(AllWorkspacesRequest) returns (stream AllWorkspacesStreamResponse); + + // ### Get A Workspace + // + // Returns information about a workspace such as the git status and selected branches + // of all projects available to the caller's user account. + // + // A workspace defines which versions of project files will be used to evaluate expressions + // and operations that use model definitions - operations such as running queries or rendering dashboards. + // Each project has its own git repository, and each project in a workspace may be configured to reference + // particular branch or revision within their respective repositories. + // + // There are two predefined workspaces available: "production" and "dev". + // + // The production workspace is shared across all Looker users. Models in the production workspace are read-only. + // Changing files in production is accomplished by modifying files in a git branch and using Pull Requests + // to merge the changes from the dev branch into the production branch, and then telling + // Looker to sync with production. + // + // The dev workspace is local to each Looker user. Changes made to project/model files in the dev workspace only affect + // that user, and only when the dev workspace is selected as the active workspace for the API session. + // (See set_session_workspace()). + // + // The dev workspace is NOT unique to an API session. Two applications accessing the Looker API using + // the same user account will see the same files in the dev workspace. To avoid collisions between + // API clients it's best to have each client login with API3 credentials for a different user account. + // + // Changes made to files in a dev workspace are persistent across API sessions. It's a good + // idea to commit any changes you've made to the git repository, but not strictly required. Your modified files + // reside in a special user-specific directory on the Looker server and will still be there when you login in again + // later and use update_session(workspace_id: "dev") to select the dev workspace for the new API session. 
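+  //
+  // The dev-workspace workflow described above looks roughly like this through the Java
+  // blocking stub (the WriteApiSession body message and setWorkspaceId field are assumptions):
+  //
+  // ```java
+  // // Select the dev workspace for the lifetime of this API session (i.e. this access token).
+  // stub.updateSession(
+  //     UpdateSessionRequest.newBuilder()
+  //         .setBody(WriteApiSession.newBuilder().setWorkspaceId("dev").build())
+  //         .build());
+  //
+  // // Inspect the selected workspace, including the git status of each project.
+  // WorkspaceResponse ws = stub.workspace(
+  //     WorkspaceRequest.newBuilder().setWorkspaceId("dev").build());
+  // ```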
+  //
+  rpc Workspace(WorkspaceRequest) returns (stream WorkspaceResponse);
+
+
+}
\ No newline at end of file
diff --git a/proto/grpc_proxy/src/main/resources/simplelogger.properties b/proto/grpc_proxy/src/main/resources/simplelogger.properties
new file mode 100644
index 000000000..0ad2bafe4
--- /dev/null
+++ b/proto/grpc_proxy/src/main/resources/simplelogger.properties
@@ -0,0 +1,2 @@
+org.slf4j.simpleLogger.defaultLogLevel=info
+org.slf4j.simpleLogger.log.com.google.looker=debug
diff --git a/proto/grpc_proxy/src/test/java/com/google/looker/test/AuthorizationTests.java b/proto/grpc_proxy/src/test/java/com/google/looker/test/AuthorizationTests.java
new file mode 100644
index 000000000..c5db3650b
--- /dev/null
+++ b/proto/grpc_proxy/src/test/java/com/google/looker/test/AuthorizationTests.java
@@ -0,0 +1,48 @@
+package com.google.looker.test;
+
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+
+import com.google.looker.client.LookerGrpcClient;
+import com.google.looker.common.Constants;
+import com.google.looker.grpc.services.AccessToken;
+import com.google.looker.grpc.services.LoginRequest;
+import com.google.looker.grpc.services.LoginResponse;
+import com.google.looker.grpc.services.LookerServiceGrpc;
+import javax.net.ssl.SSLException;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class AuthorizationTests {
+
+  final private static Logger LOGGER = LoggerFactory.getLogger(AuthorizationTests.class);
+
+  @Test
+  void rawLogin() throws SSLException {
+    LOGGER.debug("run login test");
+    LookerGrpcClient lookerGrpcClient = new LookerGrpcClient();
+    LookerServiceGrpc.LookerServiceBlockingStub stub = lookerGrpcClient.getLookerServiceBlockingStub();
+    LoginResponse loginResponse = stub.login(
+        LoginRequest
+            .newBuilder()
+            .setClientId(System.getProperty(Constants.LOOKER_CLIENT_ID))
+            .setClientSecret(System.getProperty(Constants.LOOKER_CLIENT_SECRET))
+            .build()
+    );
+    AccessToken accessToken = loginResponse.getResult();
+    assertNotNull(accessToken);
+    assertNotNull(accessToken.getAccessToken());
+  }
+
+  @Test
+  void clientLogout() throws SSLException {
+    LOGGER.debug("run logout test");
+    LookerGrpcClient lookerGrpcClient = new LookerGrpcClient();
+    lookerGrpcClient.login();
+    assertNotNull(lookerGrpcClient.getAccessToken());
+    lookerGrpcClient.logout();
+    assertNull(lookerGrpcClient.getAccessToken());
+  }
+
+}
diff --git a/proto/grpc_proxy/src/test/java/com/google/looker/test/ConnectionTests.java b/proto/grpc_proxy/src/test/java/com/google/looker/test/ConnectionTests.java
new file mode 100644
index 000000000..b238e9dc0
--- /dev/null
+++ b/proto/grpc_proxy/src/test/java/com/google/looker/test/ConnectionTests.java
@@ -0,0 +1,155 @@
+package com.google.looker.test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import com.google.looker.client.LookerGrpcClient;
+import com.google.looker.grpc.services.AllConnectionsRequest;
+import com.google.looker.grpc.services.AllConnectionsResponse;
+import com.google.looker.grpc.services.ConnectionRequest;
+import com.google.looker.grpc.services.ConnectionResponse;
+import com.google.looker.grpc.services.CreateConnectionRequest;
+import com.google.looker.grpc.services.CreateConnectionResponse;
+import com.google.looker.grpc.services.DBConnection;
+import com.google.looker.grpc.services.DeleteConnectionRequest;
+import com.google.looker.grpc.services.DeleteConnectionResponse;
+import com.google.looker.grpc.services.LookerServiceGrpc;
+import com.google.looker.grpc.services.TestConnectionRequest;
+import com.google.looker.grpc.services.TestConnectionResponse;
+import com.google.looker.grpc.services.UpdateConnectionRequest;
+import com.google.looker.grpc.services.UpdateConnectionResponse;
+import javax.net.ssl.SSLException;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class ConnectionTests {
+
+  final private static Logger LOGGER = LoggerFactory.getLogger(ConnectionTests.class);
+  final private LookerGrpcClient lookerGrpcClient = new LookerGrpcClient();
+  private LookerServiceGrpc.LookerServiceBlockingStub stub;
+  final private static String TEST_CONNECTION_NAME = "testconnection";
+
+  @BeforeEach
+  void login() throws SSLException {
+    lookerGrpcClient.login();
+    stub = lookerGrpcClient.getLookerServiceBlockingStub();
+  }
+
+  @AfterEach
+  void logout() throws SSLException {
+    stub = null;
+    lookerGrpcClient.logout();
+  }
+
+  @Test
+  void connections() throws SSLException {
+    LOGGER.debug("run connections test");
+    // Get all connections
+    AllConnectionsResponse allConnectionsResponse = stub
+        .allConnections(
+            AllConnectionsRequest
+                .newBuilder()
+                .build()
+        );
+    assertTrue(allConnectionsResponse.getResultCount() > 0);
+
+    // Get a single connection
+    String connectionName = allConnectionsResponse.getResult(0).getName();
+    ConnectionResponse connectionResponse = stub
+        .connection(
+            ConnectionRequest
+                .newBuilder()
+                .setConnectionName(connectionName)
+                .setFields("name")
+                .build()
+        );
+    assertNotNull(connectionResponse.getResult());
+    assertEquals(connectionName, connectionResponse.getResult().getName());
+
+    // Create a connection
+    cleanupTestConnection();
+    CreateConnectionResponse createConnectionResponse = stub.createConnection(
+        CreateConnectionRequest
+            .newBuilder()
+            .setBody(
+                DBConnection
+                    .newBuilder()
+                    .setName(TEST_CONNECTION_NAME)
+                    .setDialectName("mysql")
+                    .setHost("db1.looker.com")
+                    .setPort(3306)
+                    .setUsername(System.getProperty("TEST_LOOKER_USERNAME") + "X")
+                    .setPassword(System.getProperty("TEST_LOOKER_PASSWORD"))
+                    .setDatabase("demo_db2")
+                    .setTmpDbName("looker_demo_scratch")
+                    .build()
+            )
+            .build()
+    );
+    assertNotNull(createConnectionResponse.getResult());
+    assertEquals(TEST_CONNECTION_NAME, createConnectionResponse.getResult().getName());
+
+    // Update connection
+    UpdateConnectionResponse updateConnectionResponse = stub.updateConnection(
+        UpdateConnectionRequest
+            .newBuilder()
+            .setConnectionName(TEST_CONNECTION_NAME)
+            .setBody(
+                DBConnection
+                    .newBuilder()
+                    .setUsername(System.getProperty("TEST_LOOKER_USERNAME"))
+                    .build()
+            )
+            .build()
+    );
+    assertNotNull(updateConnectionResponse.getResult());
+    assertEquals(System.getProperty("TEST_LOOKER_USERNAME"),
+        updateConnectionResponse.getResult().getUsername());
+
+    // Test connection
+    TestConnectionResponse testConnectionResponse = stub.testConnection(TestConnectionRequest
+        .newBuilder()
+        .setConnectionName(System.getProperty("TEST_CONNECTION_NAME"))
+        .setTests("connect")
+        .build());
+    assertTrue(testConnectionResponse.getResultCount() > 0);
+    assertEquals("Can connect", testConnectionResponse.getResult(0).getMessage());
+
+    // Delete connection
+    DeleteConnectionResponse deleteConnectionResponse = stub.deleteConnection(
+        DeleteConnectionRequest
+            .newBuilder()
+            .setConnectionName(TEST_CONNECTION_NAME)
+            .build()
+    );
+    assertNotNull(deleteConnectionResponse.getResult());
+    assertEquals("", deleteConnectionResponse.getResult());
+  }
+
+  private void cleanupTestConnection() {
+    try {
+      stub.connection(
+          ConnectionRequest
+              .newBuilder()
+              .setConnectionName(TEST_CONNECTION_NAME)
+              .setFields("name")
+              .build()
+      );
+      stub.deleteConnection(
+          DeleteConnectionRequest
+              .newBuilder()
+              .setConnectionName(TEST_CONNECTION_NAME)
+              .build()
+      );
+    } catch(RuntimeException e) {
+      // noop - not supposed to be there
+    }
+
+  }
+
+  //
+}
diff --git a/proto/grpc_proxy/src/test/java/com/google/looker/test/PingTests.java b/proto/grpc_proxy/src/test/java/com/google/looker/test/PingTests.java
new file mode 100644
index 000000000..57918bd2d
--- /dev/null
+++ b/proto/grpc_proxy/src/test/java/com/google/looker/test/PingTests.java
@@ -0,0 +1,26 @@
+package com.google.looker.test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import com.google.looker.client.LookerGrpcClient;
+import com.google.looker.grpc.services.PingServiceGrpc;
+import com.google.looker.server.rtl.PingRequest;
+import javax.net.ssl.SSLException;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class PingTests {
+
+  final private static Logger LOGGER = LoggerFactory.getLogger(PingTests.class);
+
+  @Test
+  void ping() throws SSLException {
+    LOGGER.debug("run ping test");
+    LookerGrpcClient lookerGrpcClient = new LookerGrpcClient();
+    PingServiceGrpc.PingServiceBlockingStub stub = lookerGrpcClient.getPingBlockingStub();
+    boolean active = stub.ping((PingRequest.newBuilder().build())).getActive();
+    assertEquals(true, active);
+  }
+
+}
diff --git a/proto/grpc_proxy/src/test/java/com/google/looker/test/StreamingUserTests.java b/proto/grpc_proxy/src/test/java/com/google/looker/test/StreamingUserTests.java
new file mode 100644
index 000000000..cf3e25774
--- /dev/null
+++ b/proto/grpc_proxy/src/test/java/com/google/looker/test/StreamingUserTests.java
@@ -0,0 +1,72 @@
+package com.google.looker.test;
+
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import com.google.looker.client.LookerGrpcClient;
+import com.google.looker.grpc.services.AllUsersRequest;
+import com.google.looker.grpc.services.AllUsersStreamResponse;
+import com.google.looker.grpc.services.LookerStreamingServiceGrpc;
+import com.google.looker.grpc.services.User;
+import io.grpc.stub.StreamObserver;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+import javax.net.ssl.SSLException;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class StreamingUserTests {
+
+  final private static Logger LOGGER = LoggerFactory.getLogger(StreamingUserTests.class);
+  final private LookerGrpcClient lookerGrpcClient = new LookerGrpcClient();
+  private LookerStreamingServiceGrpc.LookerStreamingServiceStub stub;
+
+  @BeforeEach
+  void login() throws SSLException {
+    lookerGrpcClient.login();
+    stub = lookerGrpcClient.getLookerStreamingServiceStub();
+  }
+
+  @AfterEach
+  void logout() throws SSLException {
+    stub = null;
+    lookerGrpcClient.logout();
+  }
+
+  @Test
+  void allUsers() throws InterruptedException {
+    LOGGER.debug("run allUsers test");
+    CountDownLatch latch = new CountDownLatch(1);
+    List<User> users = new ArrayList<>();
+    int[] countChunks = {0};
+    StreamObserver<AllUsersStreamResponse> responseObserver = new StreamObserver<AllUsersStreamResponse>() {
+
+      @Override
+      public void onNext(AllUsersStreamResponse value) {
+        countChunks[0] += 1;
+        users.add(value.getResult());
+      }
+
+      @Override
+      public void onError(Throwable t) {
+      }
+
+      @Override
+      public void onCompleted() {
+        latch.countDown();
+      }
+    };
+    stub.allUsers(
+        AllUsersRequest.newBuilder().build(),
+        responseObserver
+    );
+    latch.await(3, TimeUnit.SECONDS);
+    assertTrue(countChunks[0] > 0);
+    assertTrue(users.size() > 0);
+  }
+
+}
diff --git a/proto/grpc_proxy/src/test/resources/simplelogger.properties b/proto/grpc_proxy/src/test/resources/simplelogger.properties
new file mode 100644
index 000000000..358c38a13
--- /dev/null
+++ b/proto/grpc_proxy/src/test/resources/simplelogger.properties
@@ -0,0 +1,2 @@
+org.slf4j.simpleLogger.defaultLogLevel=info
+org.slf4j.simpleLogger.log.com.google.looker=info
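
The generated Workspace documentation above describes switching an API session between the shared "production" workspace and the per-user "dev" workspace via update_session(workspace_id: "dev"). The sketch below shows roughly how that switch could look through the proxy client, following the login/blocking-stub pattern from AuthorizationTests. It is a minimal sketch only: UpdateSessionRequest, UpdateSessionResponse, WriteApiSession, ApiSession, setWorkspaceId/getWorkspaceId, and the updateSession stub method are assumed names inferred from the XxxRequest/XxxResponse conventions visible in this change, not classes confirmed by it.

package com.google.looker.test;

import com.google.looker.client.LookerGrpcClient;
// Assumed generated message and service classes; names follow the Request/Response
// pattern used elsewhere in this change but are not part of it.
import com.google.looker.grpc.services.ApiSession;
import com.google.looker.grpc.services.LookerServiceGrpc;
import com.google.looker.grpc.services.UpdateSessionRequest;
import com.google.looker.grpc.services.UpdateSessionResponse;
import com.google.looker.grpc.services.WriteApiSession;
import javax.net.ssl.SSLException;

public class DevWorkspaceExample {

  public static void main(String[] args) throws SSLException {
    // Login with API3 credentials, as in AuthorizationTests.
    LookerGrpcClient client = new LookerGrpcClient();
    client.login();
    LookerServiceGrpc.LookerServiceBlockingStub stub = client.getLookerServiceBlockingStub();

    // Switch this API session to the per-user "dev" workspace
    // (the gRPC equivalent of update_session(workspace_id: "dev")).
    UpdateSessionResponse response = stub.updateSession(
        UpdateSessionRequest
            .newBuilder()
            .setBody(
                WriteApiSession
                    .newBuilder()
                    .setWorkspaceId("dev")
                    .build()
            )
            .build()
    );
    ApiSession session = response.getResult();
    System.out.println("Active workspace: " + session.getWorkspaceId());

    client.logout();
  }
}

The unary blocking stub is used here because the session update presumably returns a single result; the streaming observer pattern from StreamingUserTests would only apply to the streaming service methods such as AllWorkspaces and Workspace shown above.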