Merged
Commits
26 commits
dc1a652
Support webhooks in Data Connect behind an experiment flag.
rosalyntan Nov 7, 2025
5623de9
Update existing logic to specifically refer to the main schema.
rosalyntan Nov 7, 2025
c9e92f9
Fix build errors.
rosalyntan Nov 8, 2025
82685ad
lint
rosalyntan Nov 10, 2025
16f7fae
Support loading schema sources from either `schema` or `schemas` field.
rosalyntan Nov 11, 2025
5ebfe69
Fix firebase init
rosalyntan Nov 11, 2025
58f3899
Properly deploy secondary schemas as well.
rosalyntan Nov 12, 2025
50c99ac
Make `getSchema` take a schema ID, move secondary schema upsert after…
rosalyntan Nov 12, 2025
c763393
Merge branch 'master' into rosalyntan.webhook
rosalyntan Nov 12, 2025
ee2401b
Fix listSchemas in `firebase init`.
rosalyntan Nov 13, 2025
8f4a5be
Fix unit tests.
rosalyntan Nov 14, 2025
09ce673
Merge branch 'master' into rosalyntan.webhook
rosalyntan Nov 14, 2025
852350a
Actually fix unit tests.
rosalyntan Nov 14, 2025
bbb977b
Add secondary schema template.
rosalyntan Nov 17, 2025
110d976
Set default parameter for getSchema schemaId.
rosalyntan Nov 17, 2025
740439e
Specify just top-level fields in listSchema call.
rosalyntan Nov 17, 2025
799a1c3
Merge branch 'master' into rosalyntan.webhook
rosalyntan Nov 17, 2025
b6a5bf0
Merge branch 'rosalyntan.webhook' into rosalyntan.download
rosalyntan Nov 17, 2025
2fcebd8
Fix VSCode unit tests.
rosalyntan Nov 17, 2025
fd64351
Merge branch 'rosalyntan.webhook' into rosalyntan.download
rosalyntan Nov 17, 2025
a89e086
Merge branch 'master' into rosalyntan.download
rosalyntan Nov 17, 2025
5594185
Download and write secondary schema files in `firebase init`.
rosalyntan Nov 18, 2025
e3370a2
Fix error in file path and actually write secondary schema files.
rosalyntan Nov 19, 2025
1b9a4e9
Fix template replacement formatting.
rosalyntan Nov 19, 2025
573a770
Merge branch 'master' into rosalyntan.download
rosalyntan Nov 19, 2025
74f59ed
Merge branch 'master' into rosalyntan.download
rosalyntan Nov 19, 2025
108 changes: 86 additions & 22 deletions src/init/features/dataconnect/index.ts
@@ -18,7 +18,7 @@
createService,
upsertSchema,
} from "../../../dataconnect/client";
import { Schema, Service, File, MAIN_SCHEMA_ID, mainSchema } from "../../../dataconnect/types";
import { Schema, Service, File, MAIN_SCHEMA_ID, isMainSchema } from "../../../dataconnect/types";
import { parseCloudSQLInstanceName, parseServiceName } from "../../../dataconnect/names";
import { logger } from "../../../logger";
import { readTemplateSync } from "../../../templates";
@@ -48,6 +48,7 @@
const DATACONNECT_WEBHOOKS_YAML_TEMPLATE = readTemplateSync(
"init/dataconnect/dataconnect-fdcwebhooks.yaml",
);
const SECONDARY_SCHEMA_YAML_TEMPLATE = readTemplateSync("init/dataconnect/secondary_schema.yaml");
const CONNECTOR_YAML_TEMPLATE = readTemplateSync("init/dataconnect/connector.yaml");
const SCHEMA_TEMPLATE = readTemplateSync("init/dataconnect/schema.gql");
const QUERIES_TEMPLATE = readTemplateSync("init/dataconnect/queries.gql");
@@ -83,6 +84,11 @@
path: string;
files: File[];
}[];
secondarySchemaGqls?: {
id: string;
files: File[];
uri: string;
}[];
seedDataGql?: string;
}

@@ -109,7 +115,7 @@

// askQuestions prompts the user about the Data Connect service they want to init. Any prompting
// logic should live here, and _no_ actuation logic should live here.
export async function askQuestions(setup: Setup): Promise<void> {

const info: RequiredInfo = {
flow: "",
appDescription: "",
@@ -162,10 +168,10 @@

// actuate writes product specific files and makes product specifc API calls.
// It does not handle writing firebase.json and .firebaserc
export async function actuate(setup: Setup, config: Config, options: any): Promise<void> {

// Most users will want to persist data between emulator runs, so set this to a reasonable default.
const dir: string = config.get("dataconnect.source", "dataconnect");

const dataDir = config.get("emulators.dataconnect.dataDir", `${dir}/.dataconnect/pgliteData`);

config.set("emulators.dataconnect.dataDir", dataDir);

const info = setup.featureInfo?.dataconnect;
@@ -207,7 +213,7 @@
setup.instructions.push(
`You can visualize the Data Connect Schema in Firebase Console:

https://console.firebase.google.com/project/${setup.projectId!}/dataconnect/locations/${info.locationId}/services/${info.serviceId}/schema`,

);
}
if (!(await isBillingEnabled(setup))) {
@@ -222,7 +228,7 @@
setup: Setup,
config: Config,
info: RequiredInfo,
options: any,

): Promise<void> {
const projectId = setup.projectId;
if (!projectId) {
@@ -329,7 +335,7 @@
{ schemaGql: schemaFiles, connectors: connectors, seedDataGql: seedDataGql },
options,
);
} catch (err: any) {

logLabeledError("dataconnect", `Operation Generation failed...`);
// GiF generate operation API has stability concerns.
// Fallback to save only the generated schema.
@@ -403,13 +409,16 @@
config: Config,
info: RequiredInfo,
serviceGql: ServiceGQL,
options: any,

): Promise<void> {
const dir: string = config.get("dataconnect.source") || "dataconnect";

const subbedDataconnectYaml = subDataconnectYamlValues({
...info,
connectorDirs: serviceGql.connectors.map((c) => c.path),
});
const subbedDataconnectYaml = subDataconnectYamlValues(
{
...info,
connectorDirs: serviceGql.connectors.map((c) => c.path),
},
serviceGql.secondarySchemaGqls?.map((sch) => ({ id: sch.id, uri: sch.uri })),
);
config.set("dataconnect", { source: dir });
await config.askWriteProjectFile(
join(dir, "dataconnect.yaml"),
@@ -435,6 +444,17 @@
// Even if the schema is empty, lets give them an empty .gql file to get started.
fs.ensureFileSync(join(dir, "schema", "schema.gql"));
}
if (serviceGql.secondarySchemaGqls?.length) {
for (const sch of serviceGql.secondarySchemaGqls) {
for (const f of sch.files) {
await config.askWriteProjectFile(
join(dir, `schema_${sch.id}`, f.path),
f.content,
!!options.force,
);
}
}
}

for (const c of serviceGql.connectors) {
await writeConnectorFiles(config, c, options);
@@ -468,23 +488,54 @@
}
}

function subDataconnectYamlValues(replacementValues: {
serviceId: string;
cloudSqlInstanceId: string;
cloudSqlDatabase: string;
connectorDirs: string[];
locationId: string;
}): string {
function subDataconnectYamlValues(
replacementValues: {
serviceId: string;
cloudSqlInstanceId: string;
cloudSqlDatabase: string;
connectorDirs: string[];
locationId: string;
},
secondarySchemas?: {
id: string;
uri: string;
}[],
): string {
const replacements: Record<string, string> = {
serviceId: "__serviceId__",
locationId: "__location__",
cloudSqlDatabase: "__cloudSqlDatabase__",
cloudSqlInstanceId: "__cloudSqlInstanceId__",
connectorDirs: "__connectorDirs__",
secondarySchemaId: "__secondarySchemaId__",
secondarySchemaSource: "__secondarySchemaSource__",
secondarySchemaUri: "__secondarySchemaUri__",
};
let replaced = experiments.isEnabled("fdcwebhooks")
? DATACONNECT_WEBHOOKS_YAML_TEMPLATE
: DATACONNECT_YAML_TEMPLATE;
if (secondarySchemas && secondarySchemas.length > 0) {
let secondaryReplaced = "";
for (const schema of secondarySchemas) {
secondaryReplaced += SECONDARY_SCHEMA_YAML_TEMPLATE;
secondaryReplaced = secondaryReplaced.replace(
replacements.secondarySchemaId,
JSON.stringify(schema.id),
);
secondaryReplaced = secondaryReplaced.replace(
replacements.secondarySchemaSource,
`"./schema_${schema.id}"`,
);
secondaryReplaced = secondaryReplaced.replace(
replacements.secondarySchemaUri,
JSON.stringify(schema.uri),
);
}
replaced = replaced.replace("#__secondarySchemaPlaceholder__\n", secondaryReplaced);
} else {
// If no secondary schemas, remove the secondary schema placeholder.
replaced = replaced.replace("#__secondarySchemaPlaceholder__\n", "");
}
for (const [k, v] of Object.entries(replacementValues)) {
replaced = replaced.replace(replacements[k], JSON.stringify(v));
}
@@ -552,17 +603,30 @@
},
],
};
const mainSch = mainSchema(schemas);
const primaryDatasource = mainSch.datasources.find((d) => d.postgresql);
if (primaryDatasource?.postgresql?.cloudSql?.instance) {
const instanceName = parseCloudSQLInstanceName(primaryDatasource.postgresql.cloudSql.instance);
info.cloudSqlInstanceId = instanceName.instanceId;
}
// TODO: Update dataconnect.yaml with downloaded secondary schemas as well.
if (mainSch.source.files?.length) {
info.serviceGql.schemaGql = mainSch.source.files;
for (const sch of schemas) {
if (isMainSchema(sch)) {
const primaryDatasource = sch.datasources.find((d) => d.postgresql);
if (primaryDatasource?.postgresql?.cloudSql?.instance) {
const instanceName = parseCloudSQLInstanceName(
primaryDatasource.postgresql.cloudSql.instance,
);
info.cloudSqlInstanceId = instanceName.instanceId;
}
info.cloudSqlDatabase = primaryDatasource?.postgresql?.database ?? "";
if (sch.source.files?.length) {
info.serviceGql.schemaGql = sch.source.files;
}
} else {
if (!info.serviceGql.secondarySchemaGqls) {
info.serviceGql.secondarySchemaGqls = [];
}
info.serviceGql.secondarySchemaGqls.push({
id: sch.name.split("/").pop()!,
files: sch.source.files || [],
uri: sch.datasources[0].httpGraphql?.uri ?? "",
});
}
}
info.cloudSqlDatabase = primaryDatasource?.postgresql?.database ?? "";
const connectors = await listConnectors(serviceName, [
"connectors.name",
"connectors.source.files",
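To make the new plumbing concrete, here is a minimal sketch (not part of the PR) of the data flow the diff above implements: one `secondarySchemaGqls` entry as populated from `listSchemas`, and where `writeFiles` places its sources. The schema id, uri, and file contents are invented for illustration.

import { join } from "path";

// Shape of one ServiceGQL.secondarySchemaGqls entry from the diff above.
// All concrete values below are assumptions, not taken from the PR.
interface SchemaFile { path: string; content: string; }
const secondarySchema: { id: string; files: SchemaFile[]; uri: string } = {
  id: "analytics",                          // sch.name.split("/").pop()
  files: [{ path: "schema.gql", content: "type Event { id: ID! }" }],
  uri: "https://example.com/graphql",       // sch.datasources[0].httpGraphql?.uri
};

// writeFiles writes each source file under `<source dir>/schema_<id>/<path>`:
const dir = "dataconnect";
for (const f of secondarySchema.files) {
  console.log(join(dir, `schema_${secondarySchema.id}`, f.path));
  // -> dataconnect/schema_analytics/schema.gql
}

The same id and uri also feed subDataconnectYamlValues, which splices one copy of the secondary-schema template (below) into dataconnect.yaml for each entry.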
1 change: 1 addition & 0 deletions templates/init/dataconnect/dataconnect-fdcwebhooks.yaml
@@ -10,4 +10,5 @@ schemas:
instanceId: __cloudSqlInstanceId__
# schemaValidation: "STRICT" # STRICT mode makes Postgres schema match Data Connect exactly.
# schemaValidation: "COMPATIBLE" # COMPATIBLE mode makes Postgres schema compatible with Data Connect.
#__secondarySchemaPlaceholder__
connectorDirs: __connectorDirs__
5 changes: 5 additions & 0 deletions templates/init/dataconnect/secondary_schema.yaml
@@ -0,0 +1,5 @@
- id: __secondarySchemaId__
source: __secondarySchemaSource__
datasource:
httpGraphql:
uri: __secondarySchemaUri__
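For reference, once subDataconnectYamlValues fills this template for a secondary schema (say id "analytics" with a made-up GraphQL endpoint), the block spliced into dataconnect.yaml in place of #__secondarySchemaPlaceholder__ would render roughly as follows. This is a sketch: the id and uri are invented, and the indentation assumes the entry nests under the schemas: list.

  - id: "analytics"
    source: "./schema_analytics"
    datasource:
      httpGraphql:
        uri: "https://example.com/graphql"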