27 changes: 0 additions & 27 deletions src/gcp_scanner/crawl.py
@@ -183,33 +183,6 @@ def get_gke_images(project_name: str, access_token: str) -> Dict[str, Any]:
return images


def get_endpoints(project_id: str,
service: discovery.Resource) -> List[Dict[str, Any]]:
"""Retrieve a list of Endpoints available in the project.

Args:
project_id: A name of a project to query info about.
service: A resource object for interacting with the service management API.

Returns:
A list of Endpoints in the project.
"""

logging.info("Retrieving info about endpoints")
endpoints_list = list()
try:
request = service.services().list(producerProjectId=project_id)
while request is not None:
response = request.execute()
endpoints_list = response.get("services", [])
request = service.services().list_next(
previous_request=request, previous_response=response)
except Exception:
logging.info("Failed to retrieve endpoints list for project %s", project_id)
logging.info(sys.exc_info())
return endpoints_list


def get_sas_for_impersonation(
iam_policy: List[Dict[str, Any]]) -> List[str]:
"""Extract a list of unique SAs from IAM policy associated with project.
9 changes: 5 additions & 4 deletions src/gcp_scanner/crawler/crawler_factory.py
@@ -30,15 +30,15 @@
from gcp_scanner.crawler.compute_subnets_crawler import ComputeSubnetsCrawler
from gcp_scanner.crawler.dns_managed_zones_crawler import DNSManagedZonesCrawler
from gcp_scanner.crawler.dns_policies_crawler import DNSPoliciesCrawler
from gcp_scanner.crawler.endpoints_crawler import EndpointsCrawler
from gcp_scanner.crawler.filestore_instances_crawler import FilestoreInstancesCrawler
from gcp_scanner.crawler.kms_keys_crawler import KMSKeysCrawler
from gcp_scanner.crawler.machine_images_crawler import ComputeMachineImagesCrawler
from gcp_scanner.crawler.source_repo_crawler import CloudSourceRepoCrawler
from gcp_scanner.crawler.sql_instances_crawler import SQLInstancesCrawler
from gcp_scanner.crawler.spanner_instances_crawler import SpannerInstancesCrawler
from gcp_scanner.crawler.pubsub_subscriptions_crawler import PubSubSubscriptionsCrawler
from gcp_scanner.crawler.service_usage_crawler import ServiceUsageCrawler

from gcp_scanner.crawler.source_repo_crawler import CloudSourceRepoCrawler
from gcp_scanner.crawler.spanner_instances_crawler import SpannerInstancesCrawler
from gcp_scanner.crawler.sql_instances_crawler import SQLInstancesCrawler

service_crawler_map = {
"app_services": AppServicesCrawler,
@@ -50,6 +50,7 @@
"compute_instances": ComputeInstancesCrawler,
"compute_snapshots": ComputeSnapshotsCrawler,
"dns_policies": DNSPoliciesCrawler,
"endpoints": EndpointsCrawler,
"filestore_instances": FilestoreInstancesCrawler,
"firewall_rules": ComputeFirewallRulesCrawler,
"iam_policy": CloudResourceManagerIAMPolicyCrawler,
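For reference, the wiring this change relies on: names from the scan configuration are looked up in service_crawler_map and instantiated by CrawlerFactory. A minimal sketch, mirroring test_create_crawler_endpoints further down in this diff (package-level import paths are assumed from the surrounding files):

  from gcp_scanner.crawler.crawler_factory import CrawlerFactory
  from gcp_scanner.crawler.endpoints_crawler import EndpointsCrawler

  # "endpoints" is the new key added to service_crawler_map above.
  crawler = CrawlerFactory.create_crawler("endpoints")
  assert isinstance(crawler, EndpointsCrawler)
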
48 changes: 48 additions & 0 deletions src/gcp_scanner/crawler/endpoints_crawler.py
@@ -0,0 +1,48 @@
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import sys
from typing import List, Dict, Any

from googleapiclient import discovery

from gcp_scanner.crawler.interface_crawler import ICrawler


class EndpointsCrawler(ICrawler):
"""Handle crawling of endpoints data."""

def crawl(self, project_name: str, service: discovery.Resource) -> List[Dict[str, Any]]:
"""Retrieve a list of Endpoints available in the project.

Args:
project_name: The name of the project to query information about.
service: A resource object for interacting with the GCP API.

Returns:
A list of resource objects representing the crawled data.
"""
logging.info("Retrieving info about endpoints")
endpoints_list = list()
try:
request = service.services().list(producerProjectId=project_name)
while request is not None:
response = request.execute()
endpoints_list = response.get("services", [])
request = service.services().list_next(
previous_request=request, previous_response=response)
except Exception:
logging.info("Failed to retrieve endpoints list for project %s", project_name)
logging.info(sys.exc_info())
return endpoints_list
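Read together with the scanner.py change below, the new crawler is exercised roughly as follows. This is a sketch only: the import paths, the credentials object, and any additional get_service() arguments (truncated in this diff) are assumptions, and the project id is a placeholder.

  from gcp_scanner.client.client_factory import ClientFactory
  from gcp_scanner.crawler.crawler_factory import CrawlerFactory

  # `credentials` is assumed to be an authorized google.oauth2 credentials object
  # obtained elsewhere by the scanner.
  service = ClientFactory.get_client("servicemanagement").get_service(credentials)
  endpoints = CrawlerFactory.create_crawler("endpoints").crawl("example-project-id", service)
  # `endpoints` holds the entries returned under "services" by services().list().
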
4 changes: 3 additions & 1 deletion src/gcp_scanner/scanner.py
@@ -401,7 +401,9 @@ def crawl_loop(initial_sa_tuples: List[Tuple[str, Credentials, List[str]]],

# Get information about Endpoints
if is_set(scan_config, 'endpoints'):
project_result['endpoints'] = crawl.get_endpoints(
project_result['endpoints'] = CrawlerFactory.create_crawler(
'endpoints',
).crawl(
project_id,
ClientFactory.get_client('servicemanagement').get_service(
credentials,
27 changes: 18 additions & 9 deletions src/gcp_scanner/test_unit.py
@@ -31,7 +31,6 @@
import requests
from google.oauth2 import credentials


from . import crawl
from . import credsdb
from . import scanner
@@ -55,8 +54,8 @@
from .client.storage_client import StorageClient
from .crawler.app_services_crawler import AppServicesCrawler
from .crawler.bigquery_crawler import BigQueryCrawler
from .crawler.cloud_functions_crawler import CloudFunctionsCrawler
from .crawler.bigtable_instances_crawler import BigTableInstancesCrawler
from .crawler.cloud_functions_crawler import CloudFunctionsCrawler
from .crawler.cloud_resource_manager_iam_policy_crawler import CloudResourceManagerIAMPolicyCrawler
from .crawler.cloud_resource_manager_project_info_crawler import CloudResourceManagerProjectInfoCrawler
from .crawler.cloud_resource_manager_project_list_crawler import CloudResourceManagerProjectListCrawler
@@ -68,17 +67,18 @@
from .crawler.compute_static_ips_crawler import ComputeStaticIPsCrawler
from .crawler.compute_subnets_crawler import ComputeSubnetsCrawler
from .crawler.crawler_factory import CrawlerFactory
from .crawler.filestore_instances_crawler import FilestoreInstancesCrawler
from .crawler.dns_managed_zones_crawler import DNSManagedZonesCrawler
from .crawler.dns_policies_crawler import DNSPoliciesCrawler
from .crawler.endpoints_crawler import EndpointsCrawler
from .crawler.filestore_instances_crawler import FilestoreInstancesCrawler
from .crawler.kms_keys_crawler import KMSKeysCrawler
from .crawler.machine_images_crawler import ComputeMachineImagesCrawler
from .crawler.sql_instances_crawler import SQLInstancesCrawler
from .crawler.spanner_instances_crawler import SpannerInstancesCrawler
from .crawler.pubsub_subscriptions_crawler import PubSubSubscriptionsCrawler
from .crawler.service_usage_crawler import ServiceUsageCrawler
from .credsdb import get_scopes_from_refresh_token
from .crawler.source_repo_crawler import CloudSourceRepoCrawler
from .crawler.spanner_instances_crawler import SpannerInstancesCrawler
from .crawler.sql_instances_crawler import SQLInstancesCrawler
from .credsdb import get_scopes_from_refresh_token

PROJECT_NAME = "test-gcp-scanner-2"

@@ -201,8 +201,10 @@ def test_creds_fetching():
# impersonate_sa()
shutil.rmtree("unit")


class TestGetSADetailsFromKeyFiles(unittest.TestCase):
"""Test fetching sa credentials from keyfiles."""

@patch("gcp_scanner.scanner.credsdb.get_creds_from_file")
def test_get_sa_details_from_key_files(self, mocked_get_creds):
# create temp directory and keyfiles
@@ -224,7 +226,7 @@ def test_get_sa_details_from_key_files(self, mocked_get_creds):

@patch("gcp_scanner.scanner.credsdb.get_creds_from_file")
def test_get_sa_details_from_key_files_without_json_file(
self, mocked_get_creds
self, mocked_get_creds
):
# create temp directory and keyfiles
with tempfile.TemporaryDirectory() as key_path:
@@ -242,7 +244,7 @@ def test_get_sa_details_from_key_files_without_json_file(

@patch("gcp_scanner.scanner.credsdb.get_creds_from_file")
def test_get_sa_details_from_key_files_with_invalid_and_valid_key_file(
self, mocked_get_creds
self, mocked_get_creds
):
# create temp directory and keyfiles
with tempfile.TemporaryDirectory() as key_path:
@@ -677,7 +679,9 @@ def test_endpoints(self):
"""Test endpoints' information."""
self.assertTrue(
verify(
crawl.get_endpoints(
CrawlerFactory.create_crawler(
"endpoints"
).crawl(
PROJECT_NAME,
ClientFactory.get_client("servicemanagement").get_service(
self.credentials,
@@ -998,6 +1002,11 @@ def test_create_crawler_service_usage(self):
crawler = CrawlerFactory.create_crawler("services")
self.assertIsInstance(crawler, ServiceUsageCrawler)

def test_create_crawler_endpoints(self):
"""Test create_crawler method with 'endpoints' name."""
crawler = CrawlerFactory.create_crawler("endpoints")
self.assertIsInstance(crawler, EndpointsCrawler)

def test_create_crawler_invalid(self):
"""Test create_crawler method with invalid name."""
with self.assertLogs(level=logging.ERROR) as log: