chore(deps): update dependency google-cloud-pubsub to v2 (#54)
* chore(deps): update dependency google-cloud-pubsub to v2

* pubsub v2 fix

* run blacken

Co-authored-by: Leah Cole <coleleah@google.com>
renovate-bot and leahecole committed Dec 28, 2020
1 parent c7d86f1 commit 626977d
Showing 3 changed files with 45 additions and 13 deletions.
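
Background on the API change these hunks apply: in google-cloud-pubsub 2.x the publisher and subscriber methods take a single request mapping instead of the positional arguments accepted in 1.x, which is why every client call in the sample fixtures gains a request={...} argument. A minimal before/after sketch with hypothetical project and resource names (it needs real credentials to actually run; the tests themselves build the paths from GCLOUD_PROJECT, TOPIC_ID, and SUBSCRIPTION_ID):

    import google.cloud.pubsub

    publisher = google.cloud.pubsub.PublisherClient()
    subscriber = google.cloud.pubsub.SubscriberClient()

    # Hypothetical names, for illustration only.
    topic_path = publisher.topic_path("my-project", "my-topic")
    subscription_path = subscriber.subscription_path("my-project", "my-subscription")

    # google-cloud-pubsub 1.x style (positional arguments), removed below:
    #     publisher.create_topic(topic_path)
    #     subscriber.create_subscription(subscription_path, topic_path)

    # google-cloud-pubsub 2.x style (single request mapping), as added below:
    publisher.create_topic(request={"name": topic_path})
    subscriber.create_subscription(
        request={"name": subscription_path, "topic": topic_path}
    )
    publisher.delete_topic(request={"topic": topic_path})
    subscriber.delete_subscription(request={"subscription": subscription_path})
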
@@ -84,13 +84,13 @@ def topic_id():
     publisher = google.cloud.pubsub.PublisherClient()
     topic_path = publisher.topic_path(GCLOUD_PROJECT, TOPIC_ID)
     try:
-        publisher.create_topic(topic_path)
+        publisher.create_topic(request={"name": topic_path})
     except google.api_core.exceptions.AlreadyExists:
         pass

     yield TOPIC_ID

-    publisher.delete_topic(topic_path)
+    publisher.delete_topic(request={"topic": topic_path})


 @pytest.fixture(scope="module")
@@ -100,13 +100,15 @@ def subscription_id(topic_id):
     topic_path = subscriber.topic_path(GCLOUD_PROJECT, topic_id)
     subscription_path = subscriber.subscription_path(GCLOUD_PROJECT, SUBSCRIPTION_ID)
     try:
-        subscriber.create_subscription(subscription_path, topic_path)
+        subscriber.create_subscription(
+            request={"name": subscription_path, "topic": topic_path}
+        )
     except google.api_core.exceptions.AlreadyExists:
         pass

     yield SUBSCRIPTION_ID

-    subscriber.delete_subscription(subscription_path)
+    subscriber.delete_subscription(request={"subscription": subscription_path})


 @pytest.fixture(scope="module")
@@ -1,5 +1,5 @@
 google-cloud-dlp==3.0.0
 google-cloud-storage==1.35.0
-google-cloud-pubsub==1.7.0
+google-cloud-pubsub==2.2.0
 google-cloud-datastore==1.15.3
 google-cloud-bigquery==2.6.1
packages/google-cloud-dlp/samples/snippets/risk_test.py: 46 changes (38 additions & 8 deletions)
@@ -49,13 +49,13 @@ def topic_id():
     publisher = google.cloud.pubsub.PublisherClient()
     topic_path = publisher.topic_path(GCLOUD_PROJECT, TOPIC_ID)
     try:
-        publisher.create_topic(topic_path)
+        publisher.create_topic(request={"name": topic_path})
     except google.api_core.exceptions.AlreadyExists:
         pass

     yield TOPIC_ID

-    publisher.delete_topic(topic_path)
+    publisher.delete_topic(request={"topic": topic_path})


 @pytest.fixture(scope="module")
@@ -65,13 +65,15 @@ def subscription_id(topic_id):
     topic_path = subscriber.topic_path(GCLOUD_PROJECT, topic_id)
     subscription_path = subscriber.subscription_path(GCLOUD_PROJECT, SUBSCRIPTION_ID)
     try:
-        subscriber.create_subscription(subscription_path, topic_path)
+        subscriber.create_subscription(
+            request={"name": subscription_path, "topic": topic_path}
+        )
     except google.api_core.exceptions.AlreadyExists:
         pass

     yield SUBSCRIPTION_ID

-    subscriber.delete_subscription(subscription_path)
+    subscriber.delete_subscription(request={"subscription": subscription_path})


 @pytest.fixture(scope="module")
@@ -118,12 +120,40 @@ def bigquery_project():

     rows_to_insert = [(u"Gary Smith", u"My email is gary@example.com")]
     harmful_rows_to_insert = [
-        (u"Gandalf", u"(123) 456-7890", "4231 5555 6781 9876", 27, "Male", "US",),
-        (u"Dumbledore", u"(313) 337-1337", "6291 8765 1095 7629", 27, "Male", "US",),
+        (
+            u"Gandalf",
+            u"(123) 456-7890",
+            "4231 5555 6781 9876",
+            27,
+            "Male",
+            "US",
+        ),
+        (
+            u"Dumbledore",
+            u"(313) 337-1337",
+            "6291 8765 1095 7629",
+            27,
+            "Male",
+            "US",
+        ),
         (u"Joe", u"(452) 123-1234", "3782 2288 1166 3030", 35, "Male", "US"),
         (u"James", u"(567) 890-1234", "8291 3627 8250 1234", 19, "Male", "US"),
-        (u"Marie", u"(452) 123-1234", "8291 3627 8250 1234", 35, "Female", "US",),
-        (u"Carrie", u"(567) 890-1234", "2253 5218 4251 4526", 35, "Female", "US",),
+        (
+            u"Marie",
+            u"(452) 123-1234",
+            "8291 3627 8250 1234",
+            35,
+            "Female",
+            "US",
+        ),
+        (
+            u"Carrie",
+            u"(567) 890-1234",
+            "2253 5218 4251 4526",
+            35,
+            "Female",
+            "US",
+        ),
     ]

     bigquery_client.insert_rows(table, rows_to_insert)
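
The risk_test.py data hunk above is formatting only, from the "run blacken" step in the commit message. The four tuples that explode onto one element per line are the ones ending with a trailing comma, which newer black releases treat as a "magic trailing comma" and always split; the entries without one stay on a single line. A small illustration of that behavior, with hypothetical values shown in black's output form and assuming black >= 20.8b0:

    # A trailing comma before the closing parenthesis makes black keep the
    # tuple exploded, one element per line, even though it would fit on one.
    exploded = (
        "Gandalf",
        "(123) 456-7890",
        "US",
    )

    # Without a trailing comma, black leaves a short tuple on a single line.
    compact = ("Joe", "(452) 123-1234", "US")
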
