docs(samples): add bigquery export samples (#315)
* docs(samples): init add bigquery export samples

* minor var name change

* minor var name change

* added projectid to create bigquery dataset

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* updated project id var

* updated dataset id

* fixture scope change

* lint fix

* minor path syntax fix

* added capsys to delete

* 🦉 Updates from OwlBot post-processor

See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md

* removed capsys check

* updated requirements with specific version

* updated comments

Co-authored-by: Anthonios Partheniou <partheniou@google.com>
Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
3 people authored and dandhlee committed Jan 26, 2023
1 parent e79c53b commit 34cfea7
Showing 3 changed files with 288 additions and 1 deletion.
3 changes: 2 additions & 1 deletion securitycenter/snippets/requirements-test.txt
@@ -1 +1,2 @@
-pytest
+pytest==6.2.5
+google-cloud-bigquery==2.34.2
190 changes: 190 additions & 0 deletions securitycenter/snippets/snippets_bigquery_export.py
@@ -0,0 +1,190 @@
#!/usr/bin/env python
#
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Snippets on exporting findings from Security Command Center to BigQuery."""


# [START securitycenter_create_bigquery_export]


def create_bigquery_export(
parent: str, export_filter: str, bigquery_dataset_id: str, bigquery_export_id: str
):

from google.cloud import securitycenter

"""
Create export configuration to export findings from a project to a BigQuery dataset.
Optionally specify filter to export certain findings only.
Args:
parent: Use any one of the following resource paths:
- organizations/{organization_id}
- folders/{folder_id}
- projects/{project_id}
export_filter: Expression that defines the filter to apply across create/update events of findings.
bigquery_dataset_id: The BigQuery dataset to write findings' updates to.
bigquery_export_id: Unique identifier provided by the client.
- example id: f"default-{str(uuid.uuid4()).split('-')[0]}"
For more info, see:
https://cloud.google.com/security-command-center/docs/how-to-analyze-findings-in-big-query#export_findings_from_to
"""
client = securitycenter.SecurityCenterClient()

# Create the BigQuery export configuration.
bigquery_export = securitycenter.BigQueryExport()
bigquery_export.description = "Export low and medium findings if the compute resource has an IAM anomalous grant"
bigquery_export.filter = export_filter
bigquery_export.dataset = f"{parent}/datasets/{bigquery_dataset_id}"

request = securitycenter.CreateBigQueryExportRequest()
request.parent = parent
request.big_query_export = bigquery_export
request.big_query_export_id = bigquery_export_id

# Create the export request.
response = client.create_big_query_export(request)

print(f"BigQuery export request created successfully: {response.name}\n")


# [END securitycenter_create_bigquery_export]
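# Illustrative only, not part of this sample: a minimal sketch of a call to
# create_bigquery_export(). The parent, filter, dataset ID, and export ID below
# are placeholder values; a unique export ID such as the uuid-based example in
# the docstring above works as well.
#
#   create_bigquery_export(
#       "projects/my-project-id",
#       'severity="LOW" OR severity="MEDIUM"',
#       "my_findings_dataset",
#       "my-export-id",
#   )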


# [START securitycenter_get_bigquery_export]
def get_bigquery_export(parent: str, bigquery_export_id: str):
    from google.cloud import securitycenter

    """
    Retrieve an existing BigQuery export.
    Args:
        parent: Use any one of the following resource paths:
            - organizations/{organization_id}
            - folders/{folder_id}
            - projects/{project_id}
        bigquery_export_id: Unique identifier that is used to identify the export.
    """

    client = securitycenter.SecurityCenterClient()

    request = securitycenter.GetBigQueryExportRequest()
    request.name = f"{parent}/bigQueryExports/{bigquery_export_id}"

    response = client.get_big_query_export(request)
    print(f"Retrieved the BigQuery export: {response.name}")


# [END securitycenter_get_bigquery_export]


# [START securitycenter_list_bigquery_export]
def list_bigquery_exports(parent: str):
    from google.cloud import securitycenter

    """
    List BigQuery exports in the given parent.
    Args:
        parent: The parent which owns the collection of BigQuery exports.
            Use any one of the following resource paths:
            - organizations/{organization_id}
            - folders/{folder_id}
            - projects/{project_id}
    """

    client = securitycenter.SecurityCenterClient()

    request = securitycenter.ListBigQueryExportsRequest()
    request.parent = parent

    response = client.list_big_query_exports(request)

    print("Listing BigQuery exports:")
    for bigquery_export in response:
        print(bigquery_export.name)


# [END securitycenter_list_bigquery_export]
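# Illustrative only, not part of this sample: hypothetical calls that fetch a
# single export by ID and then list every export under the same placeholder
# parent resource.
#
#   get_bigquery_export("projects/my-project-id", "my-export-id")
#   list_bigquery_exports("projects/my-project-id")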


# [START securitycenter_update_bigquery_export]
def update_bigquery_export(parent: str, export_filter: str, bigquery_export_id: str):
    """
    Updates an existing BigQuery export.
    Args:
        parent: Use any one of the following resource paths:
            - organizations/{organization_id}
            - folders/{folder_id}
            - projects/{project_id}
        export_filter: Expression that defines the filter to apply across create/update events of findings.
        bigquery_export_id: Unique identifier provided by the client.
        For more info, see:
        https://cloud.google.com/security-command-center/docs/how-to-analyze-findings-in-big-query#export_findings_from_to
    """
    from google.cloud import securitycenter
    from google.protobuf import field_mask_pb2

    client = securitycenter.SecurityCenterClient()

    # Set the new values for export configuration.
    bigquery_export = securitycenter.BigQueryExport()
    bigquery_export.name = f"{parent}/bigQueryExports/{bigquery_export_id}"
    bigquery_export.filter = export_filter

    # Field mask to only update the export filter.
    # Set the update mask to specify which properties should be updated.
    # If empty, all mutable fields will be updated.
    # For more info on constructing field mask path, see the proto or:
    # https://googleapis.dev/python/protobuf/latest/google/protobuf/field_mask_pb2.html
    field_mask = field_mask_pb2.FieldMask(paths=["filter"])

    request = securitycenter.UpdateBigQueryExportRequest()
    request.big_query_export = bigquery_export
    request.update_mask = field_mask

    response = client.update_big_query_export(request)

    if response.filter != export_filter:
        print("Failed to update BigQueryExport!")
        return
    print("BigQueryExport updated successfully!")


# [END securitycenter_update_bigquery_export]
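# Illustrative only, not part of this sample: a hypothetical call that narrows
# the placeholder export to medium-severity findings only; because the field
# mask above names just "filter", no other properties of the export are
# modified.
#
#   update_bigquery_export(
#       "projects/my-project-id", 'severity="MEDIUM"', "my-export-id"
#   )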


# [START securitycenter_delete_bigquery_export]
def delete_bigquery_export(parent: str, bigquery_export_id: str):
    """
    Delete an existing BigQuery export.
    Args:
        parent: Use any one of the following resource paths:
            - organizations/{organization_id}
            - folders/{folder_id}
            - projects/{project_id}
        bigquery_export_id: Unique identifier that is used to identify the export.
    """
    from google.cloud import securitycenter

    client = securitycenter.SecurityCenterClient()

    request = securitycenter.DeleteBigQueryExportRequest()
    request.name = f"{parent}/bigQueryExports/{bigquery_export_id}"

    client.delete_big_query_export(request)
    print(f"BigQuery export request deleted successfully: {bigquery_export_id}")


# [END securitycenter_delete_bigquery_export]
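# Illustrative only, not part of this sample: a hypothetical cleanup call using
# the same placeholder parent and export ID as in the sketches above.
#
#   delete_bigquery_export("projects/my-project-id", "my-export-id")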
96 changes: 96 additions & 0 deletions securitycenter/snippets/snippets_bigquery_export_test.py
@@ -0,0 +1,96 @@
#!/usr/bin/env python
#
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


# TODO(developer): Replace these variables before running the sample.
import os
import re
import uuid

from _pytest.capture import CaptureFixture
import pytest

import snippets_bigquery_export

PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"]
GOOGLE_APPLICATION_CREDENTIALS = os.environ["GOOGLE_APPLICATION_CREDENTIALS"]
BIGQUERY_DATASET_ID = f"sampledataset{str(uuid.uuid4()).split('-')[0]}"
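
# Note (illustrative, not part of the original test): the fixture and tests
# below assume the two environment variables above are set and that the
# credentials can both manage Security Command Center BigQuery exports and
# create/delete BigQuery datasets; a typical run would be something like
# `pytest snippets_bigquery_export_test.py`.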


@pytest.fixture(scope="module")
def bigquery_export_id():
    bigquery_export_id = f"default-{str(uuid.uuid4()).split('-')[0]}"

    create_bigquery_dataset(BIGQUERY_DATASET_ID)
    export_filter = 'severity="LOW" OR severity="MEDIUM"'
    snippets_bigquery_export.create_bigquery_export(
        f"projects/{PROJECT_ID}", export_filter, BIGQUERY_DATASET_ID, bigquery_export_id
    )

    yield bigquery_export_id

    snippets_bigquery_export.delete_bigquery_export(
        f"projects/{PROJECT_ID}", bigquery_export_id
    )
    delete_bigquery_dataset(BIGQUERY_DATASET_ID)


def create_bigquery_dataset(dataset_id: str):
    from google.cloud import bigquery

    bigquery_client = bigquery.Client()

    dataset_id_full = "{}.{}".format(PROJECT_ID, dataset_id)
    dataset = bigquery.Dataset(dataset_id_full)

    dataset = bigquery_client.create_dataset(dataset)
    print("Dataset {} created.".format(dataset.dataset_id))


def delete_bigquery_dataset(dataset_id: str):
    from google.cloud import bigquery

    bigquery_client = bigquery.Client()
    bigquery_client.delete_dataset(dataset_id)
    print("Dataset {} deleted.".format(dataset_id))


def test_get_bigquery_export(capsys: CaptureFixture, bigquery_export_id: str):
    snippets_bigquery_export.get_bigquery_export(
        f"projects/{PROJECT_ID}", bigquery_export_id
    )
    out, _ = capsys.readouterr()
    assert re.search(
        "Retrieved the BigQuery export",
        out,
    )
    assert re.search(f"bigQueryExports/{bigquery_export_id}", out)


def test_list_bigquery_exports(capsys: CaptureFixture, bigquery_export_id: str):
    snippets_bigquery_export.list_bigquery_exports(f"projects/{PROJECT_ID}")
    out, _ = capsys.readouterr()
    assert re.search("Listing BigQuery exports:", out)
    assert re.search(bigquery_export_id, out)


def test_update_bigquery_exports(capsys: CaptureFixture, bigquery_export_id: str):
    export_filter = 'severity="MEDIUM"'
    snippets_bigquery_export.update_bigquery_export(
        f"projects/{PROJECT_ID}", export_filter, bigquery_export_id
    )
    out, _ = capsys.readouterr()
    assert re.search("BigQueryExport updated successfully!", out)
