From 1878b8ba090270bc7dc2f6040710f504cab4ef9a Mon Sep 17 00:00:00 2001
From: Jon Wayne Parrott
Date: Fri, 29 Jan 2016 10:44:57 -0800
Subject: [PATCH] Adding datastore samples.

---
 datastore/api/README.md   |   4 +
 datastore/api/index.yaml  |  25 ++
 datastore/api/snippets.py | 820 ++++++++++++++++++++++++++++++++++++++
 3 files changed, 849 insertions(+)
 create mode 100644 datastore/api/README.md
 create mode 100644 datastore/api/index.yaml
 create mode 100644 datastore/api/snippets.py

diff --git a/datastore/api/README.md b/datastore/api/README.md
new file mode 100644
index 000000000000..a53482eda9d6
--- /dev/null
+++ b/datastore/api/README.md
@@ -0,0 +1,4 @@
+# Cloud Datastore API Samples
+
+
+
diff --git a/datastore/api/index.yaml b/datastore/api/index.yaml
new file mode 100644
index 000000000000..fc79990752d2
--- /dev/null
+++ b/datastore/api/index.yaml
@@ -0,0 +1,25 @@
+indexes:
+- kind: Task
+  properties:
+  - name: done
+  - name: priority
+    direction: desc
+- kind: Task
+  properties:
+  - name: priority
+  - name: percent_complete
+- kind: Task
+  properties:
+  - name: priority
+    direction: desc
+  - name: created
+- kind: Task
+  properties:
+  - name: type
+  - name: priority
+- kind: Task
+  properties:
+  - name: priority
+  - name: done
+  - name: created
+    direction: desc
diff --git a/datastore/api/snippets.py b/datastore/api/snippets.py
new file mode 100644
index 000000000000..535ac06d985e
--- /dev/null
+++ b/datastore/api/snippets.py
@@ -0,0 +1,820 @@
+# Copyright 2016, Google, Inc.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+from collections import defaultdict
+import datetime
+from pprint import pprint
+import time
+
+import gcloud
+from gcloud import datastore
+
+
+def incomplete_key(client):
+    # [START incomplete_key]
+    key = client.key('Task')
+    # [END incomplete_key]
+
+    return key
+
+
+def named_key(client):
+    # [START named_key]
+    key = client.key('Task', 'sample_task')
+    # [END named_key]
+
+    return key
+
+
+def key_with_parent(client):
+    # [START key_with_parent]
+    key = client.key('TaskList', 'default', 'Task', 'sample_task')
+    # [END key_with_parent]
+
+    return key
+
+
+def key_with_multilevel_parent(client):
+    # [START key_with_multilevel_parent]
+    key = client.key(
+        'User', 'alice',
+        'TaskList', 'default',
+        'Task', 'sample_task')
+    # [END key_with_multilevel_parent]
+
+    return key
+
+
+def basic_entity(client):
+    # [START basic_entity]
+    task = datastore.Entity()
+    task.update({
+        'type': 'Personal',
+        'done': False,
+        'priority': 4,
+        'description': 'Learn Cloud Datastore'
+    })
+    # [END basic_entity]
+
+    return task
+
+
+def entity_with_parent(client):
+    # [START entity_with_parent]
+    key_with_parent = client.key(
+        'TaskList', 'default', 'Task', 'sample_task')
+
+    task = datastore.Entity(key=key_with_parent)
+
+    task.update({
+        'type': 'Personal',
+        'done': False,
+        'priority': 4,
+        'description': 'Learn Cloud Datastore'
+    })
+    # [END entity_with_parent]
+
+    return task
+
+
+def properties(client):
+    # [START properties]
+    task = datastore.Entity()
+    task.update({
+        # String
+        'type': 'Personal',
+        'description': 'Learn Cloud Datastore',
+        # Date
+        'created': datetime.datetime.utcnow(),
+        # Boolean
+        'done': False,
+        # Integer
+        'priority': 4,
+        # Float
+        'percent_complete': 10.5,
+    })
+    # [END properties]
+
+    return task
+
+
+def array_value(client):
+    # [START array_value]
+    task = datastore.Entity()
+    task.update({
+        'tags': [
+            'fun',
+            'programming'
+        ],
+        'collaborators': [
+            'alice',
+            'bob'
+        ]
+    })
+    # [END array_value]
+
+    return task
+
+
+def upsert(client):
+    # [START upsert]
+    complete_key = client.key('Task', 'sample_task')
+
+    task = datastore.Entity(key=complete_key)
+
+    task.update({
+        'type': 'Personal',
+        'done': False,
+        'priority': 4,
+        'description': 'Learn Cloud Datastore'
+    })
+
+    client.put(task)
+    # [END upsert]
+
+    return task
+
+
+def insert(client):
+    # [START insert]
+    incomplete_key = client.key('Task')
+
+    task = datastore.Entity(key=incomplete_key)
+
+    task.update({
+        'type': 'Personal',
+        'done': False,
+        'priority': 4,
+        'description': 'Learn Cloud Datastore'
+    })
+
+    client.put(task)
+    # [END insert]
+
+    return task
+
+
+def update(client):
+    # Create the entity we're going to update.
+    upsert(client)
+
+    # [START update]
+    key = client.key('Task', 'sample_task')
+    task = client.get(key)
+
+    task['done'] = True
+
+    client.put(task)
+    # [END update]
+
+    return task
+
+
+def lookup(client):
+    # Create the entity that we're going to look up.
+    upsert(client)
+
+    # [START lookup]
+    key = client.key('Task', 'sample_task')
+    task = client.get(key)
+    # <Entity('Task', 'sample_task') {...}>
+    # [END lookup]
+
+    return task
+
+
+def delete(client):
+    # Create the entity we're going to delete.
+    upsert(client)
+
+    # [START delete]
+    key = client.key('Task', 'sample_task')
+    client.delete(key)
+    # [END delete]
+
+    return key
+
+
+def batch_upsert(client):
+    # [START batch_upsert]
+    task1 = datastore.Entity(client.key('Task', 1))
+
+    task1.update({
+        'type': 'Personal',
+        'done': False,
+        'priority': 4,
+        'description': 'Learn Cloud Datastore'
+    })
+
+    task2 = datastore.Entity(client.key('Task', 2))
+
+    task2.update({
+        'type': 'Work',
+        'done': False,
+        'priority': 8,
+        'description': 'Integrate Cloud Datastore'
+    })
+
+    client.put_multi([task1, task2])
+    # [END batch_upsert]
+
+    return task1, task2
+
+
+def batch_lookup(client):
+    # Create the entities we will look up.
+    batch_upsert(client)
+
+    # [START batch_lookup]
+    keys = [
+        client.key('Task', 1),
+        client.key('Task', 2)
+    ]
+
+    tasks = client.get_multi(keys)
+    # [<Entity('Task', 1) {...}>, <Entity('Task', 2) {...}>]
+    # [END batch_lookup]
+
+    return tasks
+
+
+def batch_delete(client):
+    # Create the entities we will delete.
+    batch_upsert(client)
+
+    # [START batch_delete]
+    keys = [
+        client.key('Task', 1),
+        client.key('Task', 2)
+    ]
+
+    client.delete_multi(keys)
+    # [END batch_delete]
+
+    return keys
+
+
+def unindexed_property_query(client):
+    # Create the entity that we're going to query.
+    upsert(client)
+
+    # [START unindexed_property_query]
+    query = client.query(kind='Task')
+    query.add_filter('description', '=', 'Learn Cloud Datastore')
+    tasks = list(query.fetch())
+    # [<Entity('Task', 'sample_task') {...}>, ...]
+    # [END unindexed_property_query]
+
+    return tasks
+
+
+def basic_query(client):
+    # Create the entity that we're going to query.
+    upsert(client)
+
+    # [START basic_query]
+    query = client.query(kind='Task')
+    query.add_filter('done', '=', False)
+    query.add_filter('priority', '>=', 4)
+    query.order = ['-priority']
+
+    tasks = list(query.fetch())
+    # [END basic_query]
+
+    return tasks
+
+
+def projection_query(client):
+    # Create the entity that we're going to query.
+    task = datastore.Entity(client.key('Task'))
+    task.update({
+        'type': 'Personal',
+        'done': False,
+        'priority': 4,
+        'description': 'Learn Cloud Datastore',
+        'percent_complete': 0.5
+    })
+    client.put(task)
+
+    # [START projection_query]
+    query = client.query(kind='Task')
+    query.projection = ['priority', 'percent_complete']
+    # [END projection_query]
+
+    # [START run_query_projection]
+    priorities = []
+    percent_completes = []
+
+    for task in query.fetch():
+        priorities.append(task['priority'])
+        percent_completes.append(task['percent_complete'])
+    # [END run_query_projection]
+
+    return priorities, percent_completes
+
+
+def ancestor_query(client):
+    task = datastore.Entity(
+        client.key('TaskList', 'default', 'Task'))
+    task.update({
+        'type': 'Personal',
+        'description': 'Learn Cloud Datastore',
+    })
+    client.put(task)
+
+    # [START ancestor_query]
+    ancestor = client.key('TaskList', 'default')
+    query = client.query(kind='Task', ancestor=ancestor)
+    tasks = list(query.fetch())
+    # [END ancestor_query]
+
+    return tasks
+
+
+def run_query(client):
+
+    # [START run_query]
+    query = client.query()
+    results = list(query.fetch())
+    # [END run_query]
+
+    return results
+
+
+def limit(client):
+
+    # [START limit]
+    query = client.query()
+    tasks = list(query.fetch(limit=5))
+    # [END limit]
+
+    return tasks
+
+
+def cursor_paging(client):
+
+    # [START cursor_paging]
+    tasks = []
+    query = client.query(kind='Task')
+    it = query.fetch(limit=5)
+
+    while True:
+        results, _, cursor = it.next_page()
+
+        if not results:
+            break
+
+        tasks.extend(results)
+    # [END cursor_paging]
+
+    return tasks
+
+
+def property_filter(client):
+    # [START property_filter]
+    query = client.query(kind='Task')
+    query.add_filter('done', '=', False)
+    tasks = list(query.fetch())
+    # [END property_filter]
+
+    return tasks
+
+
+def composite_filter(client):
+    # Create the entity that we're going to query.
+    upsert(client)
+
+    # [START composite_filter]
+    query = client.query(kind='Task')
+    query.add_filter('done', '=', False)
+    query.add_filter('priority', '=', 4)
+    tasks = list(query.fetch())
+    # [END composite_filter]
+
+    return tasks
+
+
+def key_filter(client):
+    # Create the entity that we're going to query.
+    upsert(client)
+
+    # [START key_filter]
+    query = client.query(kind='Task')
+    query.add_filter('__key__', '>', client.key('Task', 'first_task'))
+    tasks = list(query.fetch())
+    # [END key_filter]
+
+    return tasks
+
+
+def ascending_sort(client):
+    # Create the entity that we're going to query.
+    upsert(client)
+
+    # [START ascending_sort]
+    query = client.query(kind='Task')
+    query.order = ['created']
+    tasks = list(query.fetch())
+    # [END ascending_sort]
+
+    return tasks
+
+
+def descending_sort(client):
+    # Create the entity that we're going to query.
+    upsert(client)
+
+    # [START descending_sort]
+    query = client.query(kind='Task')
+    query.order = ['-created']
+    tasks = list(query.fetch())
+    # [END descending_sort]
+
+    return tasks
+
+
+def multi_sort(client):
+    # Create the entity that we're going to query.
+    upsert(client)
+
+    # [START multi_sort]
+    query = client.query(kind='Task')
+    query.order = [
+        '-priority',
+        'created'
+    ]
+    tasks = list(query.fetch())
+    # [END multi_sort]
+
+    return tasks
+
+
+def keys_only_query(client):
+    # Create the entity that we're going to query.
+    upsert(client)
+
+    # [START keys_only_query]
+    query = client.query()
+    query.keys_only()
+    keys = list([x.key for x in query.fetch()])
+    # [END keys_only_query]
+
+    return keys
+
+
+def distinct_query(client):
+    # Create the entity that we're going to query.
+    upsert(client)
+
+    # [START distinct_query]
+    query = client.query(kind='Task')
+    query.group_by = ['type', 'priority']
+    query.order = ['type', 'priority']
+    tasks = list(query.fetch())
+    # [END distinct_query]
+
+    return tasks
+
+
+def distinct_on_query(client):
+    # Create the entity that we're going to query.
+    upsert(client)
+
+    # [START distinct_on_query]
+    query = client.query(kind='Task')
+    query.group_by = ['type']
+    query.order = ['type', 'priority']
+    tasks = list(query.fetch())
+    # [END distinct_on_query]
+
+    return tasks
+
+
+def kindless_query(client):
+    # [START kindless_query]
+    an_hour_ago = str(time.time() - 3600)
+    last_seen_key = client.key('Task', an_hour_ago)
+    query = client.query()
+    query.add_filter('__key__', '>', last_seen_key)
+    entities = list(query.fetch())
+    # [END kindless_query]
+
+    return entities
+
+
+def inequality_range(client):
+    # [START inequality_range]
+    query = client.query(kind='Task')
+    query.add_filter(
+        'created', '>', datetime.datetime(1990, 1, 1))
+    query.add_filter(
+        'created', '<', datetime.datetime(2000, 12, 31, 23, 59, 59))
+    tasks = list(query.fetch())
+    # [END inequality_range]
+
+    return tasks
+
+
+def inequality_invalid(client):
+    try:
+        # [START inequality_invalid]
+        query = client.query(kind='Task')
+        query.add_filter(
+            'created', '>', datetime.datetime(1990, 1, 1))
+        query.add_filter(
+            'priority', '>', 3)
+        tasks = list(query.fetch())
+        # [END inequality_invalid]
+
+        return tasks
+
+    except gcloud.exceptions.BadRequest:
+        pass
+
+
+def equal_and_inequality_range(client):
+    # [START equal_and_inequality_range]
+    query = client.query(kind='Task')
+    query.add_filter('priority', '=', 4)
+    query.add_filter('done', '=', False)
+    query.add_filter(
+        'created', '>', datetime.datetime(1990, 1, 1))
+    query.add_filter(
+        'created', '<', datetime.datetime(2000, 12, 31, 23, 59, 59))
+    tasks = list(query.fetch())
+    # [END equal_and_inequality_range]
+
+    return tasks
+
+
+def inequality_sort(client):
+    # [START inequality_sort]
+    query = client.query(kind='Task')
+    query.add_filter('priority', '>', 3)
+    query.order = ['priority', 'created']
+    tasks = list(query.fetch())
+    # [END inequality_sort]
+
+    return tasks
+
+
+def inequality_sort_invalid_not_same(client):
+    try:
+        # [START inequality_sort_invalid_not_same]
+        query = client.query(kind='Task')
+        query.add_filter('priority', '>', 3)
+        query.order = ['created']
+        tasks = list(query.fetch())
+        # [END inequality_sort_invalid_not_same]
+
+        return tasks
+
+    except gcloud.exceptions.BadRequest:
+        pass
+
+
+def inequality_sort_invalid_not_first(client):
+    try:
+        # [START inequality_sort_invalid_not_first]
+        query = client.query(kind='Task')
+        query.add_filter('priority', '>', 3)
+        query.order = ['created', 'priority']
+        tasks = list(query.fetch())
+        # [END inequality_sort_invalid_not_first]
+
+        return tasks
+
+    except gcloud.exceptions.BadRequest:
+        pass
+
+
+def array_value_inequality_range(client):
+    # [START array_value_inequality_range]
+    query = client.query(kind='Task')
+    query.add_filter('tag', '>', 'learn')
+    query.add_filter('tag', '<', 'math')
+    tasks = list(query.fetch())
+    # [END array_value_inequality_range]
+
+    return tasks
+
+
+def array_value_equality(client):
+    # [START array_value_equality]
+    query = client.query(kind='Task')
+    query.add_filter('tag', '=', 'fun')
+    query.add_filter('tag', '=', 'programming')
+    tasks = list(query.fetch())
+    # [END array_value_equality]
+
+    return tasks
+
+
+def exploding_properties(client):
+    # [START exploding_queries]
+    task = datastore.Entity(client.key('Task'))
+    task.update({
+        'tags': [
+            'fun',
+            'programming',
+            'learn'
+        ],
+        'collaborators': [
+            'alice',
+            'bob',
+            'charlie'
+        ],
+        'created': datetime.datetime.utcnow()
+    })
+    # [END exploding_queries]
+
+    client.put(task)
+    return task
+
+
+def transactional_update(client):
+    # Create the entities we're going to manipulate.
+    account1 = datastore.Entity(client.key('Account'))
+    account1['balance'] = 100
+    account2 = datastore.Entity(client.key('Account'))
+    account2['balance'] = 100
+    client.put_multi([account1, account2])
+
+    # [START transactional_update]
+    def transfer_funds(client, from_key, to_key, amount):
+        with client.transaction():
+            from_account, to_account = client.get_multi([from_key, to_key])
+
+            from_account['balance'] -= amount
+            to_account['balance'] += amount
+
+            client.put_multi([from_account, to_account])
+
+        return from_account, to_account
+    # [END transactional_update]
+
+    # [START transactional_retry]
+    for _ in range(5):
+        try:
+            return transfer_funds(client, account1.key, account2.key, 50)
+        except gcloud.exceptions.Conflict:
+            continue
+    # [END transactional_retry]
+
+
+def transactional_get_or_create(client):
+    # [START transactional_get_or_create]
+    with client.transaction():
+        key = client.key('Task', datetime.datetime.utcnow().isoformat())
+
+        task = client.get(key)
+
+        if not task:
+            task = datastore.Entity(key)
+            task.update({
+                'description': 'Example task'
+            })
+            client.put(task)
+
+        return task
+    # [END transactional_get_or_create]
+
+
+def transactional_single_entity_group_read_only(client):
+    client.put_multi([
+        datastore.Entity(key=client.key('TaskList', 'default')),
+        datastore.Entity(key=client.key('TaskList', 'default', 'Task', 1))
+    ])
+
+    # [START transactional_single_entity_group_read_only]
+    with client.transaction():
+        task_list_key = client.key('TaskList', 'default')
+
+        task_list = client.get(task_list_key)
+
+        query = client.query(kind='Task', ancestor=task_list_key)
+        tasks_in_list = list(query.fetch())
+
+        return task_list, tasks_in_list
+    # [END transactional_single_entity_group_read_only]
+
+
+def namespace_run_query(client):
+    # Create an entity in another namespace.
+    task = datastore.Entity(
+        client.key('Task', namespace='namespace-b'))
+    client.put(task)
+
+    # [START namespace_run_query]
+    # All namespaces
+    query = client.query(kind='__namespace__')
+    query.keys_only()
+
+    all_namespaces = [x.key.id_or_name for x in query.fetch()]
+
+    # Filtered namespaces
+    query = client.query(kind='__namespace__')
+    query.add_filter(
+        '__key__', '>=', client.key('__namespace__', 'namespace-a'))
+    query.add_filter(
+        '__key__', '<', client.key('__namespace__', 'namespace-z'))
+
+    filtered_namespaces = [x.key.id_or_name for x in query.fetch()]
+    # [END namespace_run_query]
+
+    return all_namespaces, filtered_namespaces
+
+
+def kind_run_query(client):
+    # Create the entity that we're going to query.
+    upsert(client)
+
+    # [START kind_run_query]
+    query = client.query(kind='__kind__')
+    query.keys_only()
+
+    kinds = [x.key.id_or_name for x in query.fetch()]
+    # [END kind_run_query]
+
+    return kinds
+
+
+def property_run_query(client):
+    # Create the entity that we're going to query.
+    upsert(client)
+
+    # [START property_run_query]
+    query = client.query(kind='__property__')
+    query.keys_only()
+
+    properties_by_kind = defaultdict(list)
+
+    for entity in query.fetch():
+        kind = entity.key.parent.name
+        property_name = entity.key.name
+
+        properties_by_kind[kind].append(property_name)
+    # [END property_run_query]
+
+    return properties_by_kind
+
+
+def property_by_kind_run_query(client):
+    # Create the entity that we're going to query.
+    upsert(client)
+
+    # [START property_by_kind_run_query]
+    ancestor = client.key('__kind__', 'Task')
+    query = client.query(kind='__property__', ancestor=ancestor)
+
+    representations_by_property = {}
+
+    for entity in query.fetch():
+        property_name = entity.key.name
+        property_types = entity['property_representation']
+
+        representations_by_property[property_name] = property_types
+
+    # [END property_by_kind_run_query]
+
+    return representations_by_property
+
+
+def main(project_id):
+    client = datastore.Client(project_id)
+
+    for name, function in globals().items():
+        if name in ('main', 'defaultdict') or not callable(function):
+            continue
+
+        print(name)
+        pprint(function(client))
+        print('\n-----------------\n')
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(
+        description='Demonstrates datastore API operations.')
+    parser.add_argument('project_id', help='Your cloud project ID.')
+
+    args = parser.parse_args()
+
+    main(args.project_id)
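
Usage sketch (an assumption, not part of the patch above): with the gcloud package installed, application credentials configured, and the placeholder your-project-id replaced by a real Cloud project ID, an individual sample can be exercised directly instead of running every function through main():

    # Hypothetical driver script; snippets.py is assumed to be importable
    # from the current directory.
    from gcloud import datastore

    import snippets

    # Placeholder project ID; substitute your own Cloud project.
    client = datastore.Client('your-project-id')

    # Write a sample entity, then query it back.
    print(snippets.upsert(client))
    print(snippets.basic_query(client))

Running python snippets.py your-project-id executes every sample function in turn, as implemented in main().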