diff --git a/src/deploy/functions/build.ts b/src/deploy/functions/build.ts
index 867ccda3b22..48c79949232 100644
--- a/src/deploy/functions/build.ts
+++ b/src/deploy/functions/build.ts
@@ -147,7 +147,7 @@ export interface TaskQueueTrigger {
   invoker?: Array<string | Expression<string>> | null;
 }
 
-interface ScheduleRetryConfig {
+export interface ScheduleRetryConfig {
   retryCount?: Field<number>;
   maxRetrySeconds?: Field<number>;
   minBackoffSeconds?: Field<number>;
@@ -157,8 +157,8 @@ export interface ScheduleTrigger {
   schedule: string | Expression<string>;
-  timeZone: string | Expression<string>;
-  retryConfig: ScheduleRetryConfig;
+  timeZone?: string | Expression<string>;
+  retryConfig?: ScheduleRetryConfig;
 }
 
 export type Triggered =
@@ -364,77 +364,97 @@ function discoverTrigger(endpoint: Endpoint): backend.Triggered {
   } else if ("scheduleTrigger" in endpoint) {
     const bkSchedule: backend.ScheduleTrigger = {
       schedule: resolveString(endpoint.scheduleTrigger.schedule),
-      timeZone: resolveString(endpoint.scheduleTrigger.timeZone),
     };
-    proto.renameIfPresent(
-      bkSchedule,
-      endpoint.scheduleTrigger,
-      "retryConfig",
-      "retryConfig",
-      resolveInt
-    );
+    proto.convertIfPresent(bkSchedule, endpoint.scheduleTrigger, "timeZone", resolveString);
+    if (endpoint.scheduleTrigger.retryConfig) {
+      // TODO: consider changing backend.ScheduleRetryConfig to use
+      // number seconds instead of proto.duration and only using
+      // duration at the GCP API layer. In general numbers >> durations.
+      bkSchedule.retryConfig = {};
+      proto.convertIfPresent(
+        bkSchedule.retryConfig,
+        endpoint.scheduleTrigger.retryConfig,
+        "maxDoublings",
+        resolveInt
+      );
+      proto.renameIfPresent(
+        bkSchedule.retryConfig,
+        endpoint.scheduleTrigger.retryConfig,
+        "minBackoffDuration",
+        "minBackoffSeconds",
+        (seconds) => proto.durationFromSeconds(resolveInt(seconds))
+      );
+      proto.renameIfPresent(
+        bkSchedule.retryConfig,
+        endpoint.scheduleTrigger.retryConfig,
+        "maxBackoffDuration",
+        "maxBackoffSeconds",
+        (seconds) => proto.durationFromSeconds(resolveInt(seconds))
+      );
+      proto.renameIfPresent(
+        bkSchedule.retryConfig,
+        endpoint.scheduleTrigger.retryConfig,
+        "maxRetryDuration",
+        "maxRetrySeconds",
+        (seconds) => proto.durationFromSeconds(resolveInt(seconds))
+      );
+      proto.convertIfPresent(
+        bkSchedule.retryConfig,
+        endpoint.scheduleTrigger.retryConfig,
+        "retryCount",
+        resolveInt
+      );
+    }
     trigger = { scheduleTrigger: bkSchedule };
   } else if ("taskQueueTrigger" in endpoint) {
     const bkTaskQueue: backend.TaskQueueTrigger = {};
     if (endpoint.taskQueueTrigger.rateLimits) {
       const bkRateLimits: backend.TaskQueueRateLimits = {};
-      proto.renameIfPresent(
+      proto.convertIfPresent(
         bkRateLimits,
         endpoint.taskQueueTrigger.rateLimits,
         "maxConcurrentDispatches",
-        "maxConcurrentDispatches",
         resolveInt
       );
-      proto.renameIfPresent(
+      proto.convertIfPresent(
         bkRateLimits,
         endpoint.taskQueueTrigger.rateLimits,
         "maxDispatchesPerSecond",
-        "maxDispatchesPerSecond",
         resolveInt
       );
       bkTaskQueue.rateLimits = bkRateLimits;
     }
     if (endpoint.taskQueueTrigger.retryConfig) {
       const bkRetryConfig: backend.TaskQueueRetryConfig = {};
-      proto.renameIfPresent(
+      proto.convertIfPresent(
         bkRetryConfig,
         endpoint.taskQueueTrigger.retryConfig,
         "maxAttempts",
-        "maxAttempts",
         resolveInt
       );
-      proto.renameIfPresent(
+      proto.convertIfPresent(
         bkRetryConfig,
         endpoint.taskQueueTrigger.retryConfig,
         "maxBackoffSeconds",
-        "maxBackoffSeconds",
-        (from: number | Expression<number> | null): string => {
-          return proto.durationFromSeconds(resolveInt(from));
-        }
+        resolveInt
       );
-      proto.renameIfPresent(
+      proto.convertIfPresent(
         bkRetryConfig,
         endpoint.taskQueueTrigger.retryConfig,
         "minBackoffSeconds",
-        "minBackoffSeconds",
-        (from: number | Expression<number> | null): string => {
-          return proto.durationFromSeconds(resolveInt(from));
-        }
+        resolveInt
       );
       proto.renameIfPresent(
         bkRetryConfig,
         endpoint.taskQueueTrigger.retryConfig,
         "maxRetrySeconds",
         "maxRetryDurationSeconds",
-        (from: number | Expression<number> | null): string => {
-          return proto.durationFromSeconds(resolveInt(from));
-        }
+        resolveInt
       );
-      proto.renameIfPresent(
+      proto.convertIfPresent(
         bkRetryConfig,
         endpoint.taskQueueTrigger.retryConfig,
         "maxDoublings",
-        "maxDoublings",
         resolveInt
       );
       bkTaskQueue.retryConfig = bkRetryConfig;
diff --git a/src/deploy/functions/runtimes/discovery/v1alpha1.ts b/src/deploy/functions/runtimes/discovery/v1alpha1.ts
index 5324acce314..5079e7ab724 100644
--- a/src/deploy/functions/runtimes/discovery/v1alpha1.ts
+++ b/src/deploy/functions/runtimes/discovery/v1alpha1.ts
@@ -257,7 +257,7 @@ function parseEndpoints(
     ep,
     "secretEnvironmentVariables",
     "secretEnvironmentVariables",
-    (senvs: Array<backend.SecretEnvVar>) => {
+    (senvs) => {
       const secretEnvironmentVariables: backend.SecretEnvVar[] = [];
       for (const { key, secret } of senvs) {
         secretEnvironmentVariables.push({
diff --git a/src/deploy/functions/runtimes/node/parseTriggers.ts b/src/deploy/functions/runtimes/node/parseTriggers.ts
index 7155796ae9d..f7a7d0435de 100644
--- a/src/deploy/functions/runtimes/node/parseTriggers.ts
+++ b/src/deploy/functions/runtimes/node/parseTriggers.ts
@@ -266,13 +266,40 @@ export function addResourcesToBuild(
         api: "cloudscheduler.googleapis.com",
         reason: "Needed for scheduled functions.",
       });
+
       triggered = {
         scheduleTrigger: {
           schedule: annotation.schedule.schedule,
-          timeZone: annotation.schedule.timeZone || "what's the default timezone?",
-          retryConfig: annotation.schedule.retryConfig || {},
         },
       };
+      proto.copyIfPresent(triggered.scheduleTrigger, annotation.schedule, "timeZone");
+      if (annotation.schedule.retryConfig) {
+        const retryConfig: build.ScheduleRetryConfig = {};
+        const from: ScheduleRetryConfig = annotation.schedule.retryConfig || {};
+        proto.copyIfPresent(retryConfig, from, "maxDoublings", "retryCount");
+        proto.renameIfPresent(
+          retryConfig,
+          from,
+          "minBackoffSeconds",
+          "minBackoffDuration",
+          proto.secondsFromDuration
+        );
+        proto.renameIfPresent(
+          retryConfig,
+          from,
+          "maxBackoffSeconds",
+          "maxBackoffDuration",
+          proto.secondsFromDuration
+        );
+        proto.renameIfPresent(
+          retryConfig,
+          from,
+          "maxRetrySeconds",
+          "maxRetryDuration",
+          proto.secondsFromDuration
+        );
+        triggered.scheduleTrigger.retryConfig = retryConfig;
+      }
     } else if (annotation.blockingTrigger) {
       if (events.v1.AUTH_BLOCKING_EVENTS.includes(annotation.blockingTrigger.eventType as any)) {
         want.requiredAPIs.push({
diff --git a/src/gcp/cloudfunctionsv2.ts b/src/gcp/cloudfunctionsv2.ts
index 4dd48f20740..1fdf9771351 100644
--- a/src/gcp/cloudfunctionsv2.ts
+++ b/src/gcp/cloudfunctionsv2.ts
@@ -211,12 +211,12 @@ const BYTES_PER_UNIT: Record<MemoryUnit, number> = {
 };
 
 /**
- * Returns the float-precision number of Mega(not Mebi)bytes in a
+ * Returns the float-precision number of Mebibytes in a
  * Kubernetes-style quantity
  * Must serve the same results as
  * https://github.com/kubernetes/kubernetes/blob/master/staging/src/k8s.io/apimachinery/pkg/api/resource/quantity.go
  */
-export function mebibytes(memory: string): number {
+export function mebibytes(memory: string): backend.MemoryOptions {
   const re = /^([0-9]+(\.[0-9]*)?)(Ki|Mi|Gi|Ti|k|M|G|T|([eE]([0-9]+)))?$/;
   const matches = re.exec(memory);
   if (!matches) {
@@ -230,7 +230,12 @@ export function mebibytes(memory: string): number {
     const suffix = matches[3] || "";
     bytes = quantity * BYTES_PER_UNIT[suffix as MemoryUnit];
   }
-  return bytes / (1 << 20);
+  bytes = bytes / (1 << 20);
+  const typed = bytes as backend.MemoryOptions;
+  if (!backend.AllMemoryOptions.includes(typed)) {
+    logger.warn(`Got a non-standard memory option ${memory}; this may break tools`);
+  }
+  return typed;
 }
 
 /**
@@ -454,10 +459,16 @@ export function functionFromEndpoint(endpoint: backend.Endpoint, source: Storage
     "ingressSettings",
     "timeoutSeconds"
   );
-  // Memory must be set because the default value of GCF gen 2 is Megabytes and
-  // we use mebibytes
-  const mem: number = endpoint.availableMemoryMb || backend.DEFAULT_MEMORY;
-  gcfFunction.serviceConfig.availableMemory = mem > 1024 ? `${mem / 1024}Gi` : `${mem}Mi`;
+  // Due to reverse compatibility with GCF gen 1, GCF defaults to 256MB instead
+  // of 256MiB. This is despite the fact that it seems like GCF gen 1's "MB" is
+  // GCF gen 2's "MiB" (or at least we interpreted it that way in the past).
+  // 128MiB functions break if we use MB instead of MiB so we always use MiB.
+  // This means though that we need to default explicitly to 256MiB over 256MB
+  // otherwise we read 256MB as 244.1MiB on update and break Pantheon which
+  // expects an enum of memories and doesn't care that 244.1MiB == 256MB.
+  const mb = endpoint.availableMemoryMb || backend.DEFAULT_MEMORY;
+  gcfFunction.serviceConfig.availableMemory = mb > 1024 ? `${mb / 1024}Gi` : `${mb}Mi`;
+
   proto.renameIfPresent(gcfFunction.serviceConfig, endpoint, "minInstanceCount", "minInstances");
   proto.renameIfPresent(gcfFunction.serviceConfig, endpoint, "maxInstanceCount", "maxInstances");
diff --git a/src/gcp/proto.ts b/src/gcp/proto.ts
index 21f4edcf50f..df013b2a418 100644
--- a/src/gcp/proto.ts
+++ b/src/gcp/proto.ts
@@ -62,21 +62,48 @@ export function copyIfPresent(
   }
 }
 
-export function renameIfPresent<Dest, Src>(
+/**
+ * Copy a field from one interface to another under a new field name, optionally transforming it with a lambda.
+ */
+export function renameIfPresent<
+  Dest extends object,
+  DestKey extends keyof Dest,
+  Src extends object,
+  SrcKey extends keyof Src
+>(
   dest: Dest,
   src: Src,
-  destField: keyof Dest,
-  srcField: keyof Src,
-  converter: (from: any) => any = (from: any) => {
-    return from;
+  destField: DestKey,
+  srcField: SrcKey,
+  converter: (from: Required<Src>[SrcKey]) => Required<Dest>[DestKey] = (
+    from: Required<Src>[SrcKey]
+  ): Required<Dest>[DestKey] => {
+    return from as any;
   }
-) {
+): void {
   if (!Object.prototype.hasOwnProperty.call(src, srcField)) {
     return;
   }
   dest[destField] = converter(src[srcField]);
 }
 
+/** Copy a field from one interface to another after transforming it with a lambda */
+export function convertIfPresent<
+  Dest extends object,
+  Src extends object,
+  Key extends keyof Dest & keyof Src
+>(
+  dest: Dest,
+  src: Src,
+  key: Key,
+  converter: (from: Required<Src>[Key]) => Required<Dest>[Key]
+): void {
+  if (!Object.prototype.hasOwnProperty.call(src, key)) {
+    return;
+  }
+  dest[key] = converter(src[key]);
+}
+
 // eslint-enable @typescript-eslint/no-unsafe-returns @typescript-eslint/no-explicit-any
 
 /**
@@ -128,8 +155,7 @@ function fieldMasksHelper(
  * Gets the correctly invoker members to be used with the invoker role for IAM API calls.
  * @param invoker the array of non-formatted invoker members
  * @param projectId the ID of the current project
- * @returns an array of correctly formatted invoker members
- *
+ * @return an array of correctly formatted invoker members
  * @throws {@link FirebaseError} if any invoker string is empty or not of the correct form
  */
 export function getInvokerMembers(invoker: string[], projectId: string): string[] {
@@ -147,8 +173,7 @@ export function getInvokerMembers(invoker: string[], projectId: string): string[
  * '{service-account}@' or '{service-account}@{project}.iam.gserviceaccount.com'.
  * @param serviceAccount the custom service account created by the user
  * @param projectId the ID of the current project
- * @returns a correctly formatted service account string
- *
+ * @return a correctly formatted service account string
  * @throws {@link FirebaseError} if the supplied service account string is empty or not of the correct form
  */
 export function formatServiceAccount(serviceAccount: string, projectId: string): string {
diff --git a/src/test/deploy/functions/runtimes/node/parseTriggers.spec.ts b/src/test/deploy/functions/runtimes/node/parseTriggers.spec.ts
index 87d0b58be10..f825889b7f1 100644
--- a/src/test/deploy/functions/runtimes/node/parseTriggers.spec.ts
+++ b/src/test/deploy/functions/runtimes/node/parseTriggers.spec.ts
@@ -283,7 +283,7 @@ describe("addResourcesToBuild", () => {
   });
 
   it("should support schedules, yielding a build reversibly equivalent to the corresponding backend", () => {
-    const schedule = {
+    const schedule: parseTriggers.ScheduleAnnotation = {
      schedule: "every 10 minutes",
      timeZone: "America/Los_Angeles",
      retryConfig: {
@@ -319,7 +319,17 @@ describe("addResourcesToBuild", () => {
     const expected: build.Build = build.of({
       func: {
         ...BASIC_ENDPOINT,
-        scheduleTrigger: schedule,
+        scheduleTrigger: {
+          schedule: "every 10 minutes",
+          timeZone: "America/Los_Angeles",
+          retryConfig: {
+            retryCount: 20,
+            maxRetrySeconds: 200,
+            minBackoffSeconds: 1,
+            maxBackoffSeconds: 10,
+            maxDoublings: 10,
+          },
+        },
         labels: {
           test: "testing",
         },
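
For reviewers, the MB-vs-MiB reasoning in the new functionFromEndpoint comment is easier to follow with concrete numbers. The standalone TypeScript sketch below is illustrative only: the simplified unit table and the toMebibytes helper are assumptions made for this example, not the real mebibytes() implementation (which parses full Kubernetes-style quantities and returns backend.MemoryOptions).

// Simplified unit table: "M" is a decimal megabyte, "Mi" a binary mebibyte.
const BYTES_PER_UNIT: Record<string, number> = {
  M: 1000 * 1000,
  Mi: 1 << 20,
};

// Hypothetical helper, for illustration only.
function toMebibytes(quantity: number, unit: "M" | "Mi"): number {
  return (quantity * BYTES_PER_UNIT[unit]) / (1 << 20);
}

toMebibytes(256, "Mi"); // 256 -> a standard memory option
toMebibytes(256, "M"); // 244.140625 -> not in the enum, which is why the CLI always emits "Mi"/"Gi" and defaults explicitly to 256MiB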