Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 16 additions & 6 deletions .generator/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import shutil
import subprocess
import sys
from pathlib import Path
from typing import Dict, List

try:
Expand Down Expand Up @@ -282,31 +283,40 @@ def _copy_files_needed_for_post_processing(output: str, input: str, library_id:

def _clean_up_files_after_post_processing(output: str, library_id: str):
"""
Clean up files which should not be included in the generated client
Clean up files which should not be included in the generated client.
This function is idempotent and will not fail if files are already removed.

Args:
output(str): Path to the directory in the container where code
should be generated.
library_id(str): The library id to be used for post processing.
"""
path_to_library = f"packages/{library_id}"
shutil.rmtree(f"{output}/{path_to_library}/.nox")
os.remove(f"{output}/{path_to_library}/CHANGELOG.md")
os.remove(f"{output}/{path_to_library}/docs/CHANGELOG.md")
os.remove(f"{output}/{path_to_library}/docs/README.rst")

# Safely remove directories, ignoring errors if they don't exist.
shutil.rmtree(f"{output}/{path_to_library}/.nox", ignore_errors=True)
shutil.rmtree(f"{output}/owl-bot-staging", ignore_errors=True)

# Safely remove specific files if they exist using pathlib.
Path(f"{output}/{path_to_library}/CHANGELOG.md").unlink(missing_ok=True)
Path(f"{output}/{path_to_library}/docs/CHANGELOG.md").unlink(missing_ok=True)
Path(f"{output}/{path_to_library}/docs/README.rst").unlink(missing_ok=True)

# The glob loops are already safe, as they do nothing if no files match.
for post_processing_file in glob.glob(
f"{output}/{path_to_library}/scripts/client-post-processing/*.yaml"
): # pragma: NO COVER
os.remove(post_processing_file)

for gapic_version_file in glob.glob(
f"{output}/{path_to_library}/**/gapic_version.py", recursive=True
): # pragma: NO COVER
os.remove(gapic_version_file)

for snippet_metadata_file in glob.glob(
f"{output}/{path_to_library}/samples/generated_samples/snippet_metadata*.json"
): # pragma: NO COVER
os.remove(snippet_metadata_file)
shutil.rmtree(f"{output}/owl-bot-staging")


def handle_generate(
Expand Down
39 changes: 39 additions & 0 deletions .generator/test-resources/librarian/generate-request.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
{
"id": "google-cloud-language",
"version": "2.17.2",
"last_generated_commit": "97a83d76a09a7f6dcab43675c87bdfeb5bcf1cb5",
"apis": [
{
"path": "google/cloud/language/v1beta2",
"service_config": "language_v1beta2.yaml",
"status": ""
},
{
"path": "google/cloud/language/v2",
"service_config": "language_v2.yaml",
"status": ""
},
{
"path": "google/cloud/language/v1",
"service_config": "language_v1.yaml",
"status": ""
}
],
"source_roots": [
"packages/google-cloud-language"
],
"preserve_regex": [
".OwlBot.yaml",
"packages/google-cloud-language/CHANGELOG.md",
"docs/CHANGELOG.md",
"docs/README.rst",
"samples/README.txt",
"tar.gz",
"gapic_version.py",
"samples/generated_samples/snippet_metadata_",
"scripts/client-post-processing"
],
"remove_regex": [
"packages/google-cloud-language"
]
}
33 changes: 30 additions & 3 deletions cloudbuild-test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,9 @@
# Reduce this timeout by moving the installation of Python runtimes to a separate base image
timeout: 7200s # 2 hours for the first uncached run, can be lowered later.
steps:
# Step 1: Build the generator image using Kaniko and push it to the registry.
- name: 'gcr.io/kaniko-project/executor:latest'
id: 'build-generator'
args:
# Specifies the Dockerfile path
- '--dockerfile=.generator/Dockerfile'
Expand All @@ -27,10 +28,36 @@ steps:
- '--destination=gcr.io/$PROJECT_ID/python-librarian-generator:latest'
# Enables Kaniko's remote registry caching
- '--cache=true'
# Sets a time-to-live for cache layers
- '--cache-ttl=24h'

# Step 2: Clone the googleapis repository into the workspace.
# This runs in parallel with the image build.
- name: 'gcr.io/cloud-builders/git'
id: 'clone-googleapis'
args: ['clone', '--depth', '1', 'https://github.com/googleapis/googleapis.git', '/workspace/googleapis']
waitFor: ['-']

# Step 3: Run the generator to generate the library code.
- name: 'gcr.io/cloud-builders/docker'
id: 'generate-library'
args:
- 'run'
- '--rm'
# Mount the cloned googleapis repo from the workspace.
- '-v'
- '/workspace/googleapis:/app/source'
# Mount the generator-input from this repo's workspace.
- '-v'
- '/workspace/.librarian/generator-input:/app/input'
# Mount the test-resources/librarian from this repo's workspace as the librarian dir.
- '-v'
- '/workspace/.generator/test-resources/librarian:/app/librarian'
# The image that was built in the first step.
- 'gcr.io/$PROJECT_ID/python-librarian-generator:latest'
# The command to execute inside the container.
- 'generate'
waitFor: ['build-generator', 'clone-googleapis']

options:
default_logs_bucket_behavior: REGIONAL_USER_OWNED_BUCKET
Expand Down
Loading