Commit

bigquery: remove unused function (#4147)
jba authored and tswast committed Oct 16, 2017
1 parent 76d303f commit 74af691
Showing 2 changed files with 0 additions and 111 deletions.
43 changes: 0 additions & 43 deletions bigquery/google/cloud/bigquery/table.py
@@ -831,46 +831,3 @@ def _build_schema_resource(fields):
            info['fields'] = _build_schema_resource(field.fields)
        infos.append(info)
    return infos
# pylint: enable=unused-argument


def _get_upload_metadata(source_format, schema, project, dataset_id, table_id):
"""Get base metadata for creating a table.
:type source_format: str
:param source_format: one of 'CSV' or 'NEWLINE_DELIMITED_JSON'.
job configuration option.
:type schema: list
:param schema: List of :class:`SchemaField` associated with a table.
:type project: str
:param table_id: The project bound to the table.
:type dataset_id: str
:param table_id: The dataset_id of the dataset.
:type table_id: str
:param table_id: The table_id of the table.
:rtype: dict
:returns: The metadata dictionary.
"""
    load_config = {
        'sourceFormat': source_format,
        'destinationTable': {
            'projectId': project,
            'datasetId': dataset_id,
            'tableId': table_id,
        },
    }
    if schema:
        load_config['schema'] = {
            'fields': _build_schema_resource(schema),
        }

    return {
        'configuration': {
            'load': load_config,
        },
    }
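
For reference, the removed helper did nothing more than assemble the nested load-job resource shown above. The sketch below illustrates how a caller might have used it; the project, dataset, and table names are invented for illustration, and the imports mirror the ones the unit tests below rely on.

from google.cloud.bigquery.table import SchemaField, _get_upload_metadata

# Hypothetical values, chosen only to show the shape of the result.
schema = [SchemaField('full_name', 'STRING', mode='REQUIRED')]
metadata = _get_upload_metadata(
    'CSV', schema, 'my-project', 'my_dataset', 'my_table')

# Per the function body above, `metadata` is:
# {
#     'configuration': {
#         'load': {
#             'sourceFormat': 'CSV',
#             'destinationTable': {
#                 'projectId': 'my-project',
#                 'datasetId': 'my_dataset',
#                 'tableId': 'my_table',
#             },
#             'schema': {'fields': [{'name': 'full_name',
#                                    'type': 'STRING',
#                                    'mode': 'REQUIRED'}]},
#         },
#     },
# }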
68 changes: 0 additions & 68 deletions bigquery/tests/unit/test_table.py
@@ -1101,74 +1101,6 @@ def test_w_subfields(self):
'mode': 'REQUIRED'}]})


class Test__get_upload_metadata(unittest.TestCase):

    @staticmethod
    def _call_fut(source_format, schema, project, dataset_id, name):
        from google.cloud.bigquery.table import _get_upload_metadata

        return _get_upload_metadata(
            source_format, schema, project, dataset_id, name)

    def test_empty_schema(self):
        source_format = 'AVRO'
        dataset = mock.Mock(project='prediction',
                            spec=['dataset_id', 'project'])
        dataset.dataset_id = 'market'  # mock.Mock() treats `name` specially.
        table_name = 'chairs'
        metadata = self._call_fut(source_format, [], dataset.project,
                                  dataset.dataset_id, table_name)

        expected = {
            'configuration': {
                'load': {
                    'sourceFormat': source_format,
                    'destinationTable': {
                        'projectId': dataset.project,
                        'datasetId': dataset.dataset_id,
                        'tableId': table_name,
                    },
                },
            },
        }
        self.assertEqual(metadata, expected)

    def test_with_schema(self):
        from google.cloud.bigquery.table import SchemaField

        source_format = 'CSV'
        full_name = SchemaField('full_name', 'STRING', mode='REQUIRED')
        dataset = mock.Mock(project='blind', spec=['dataset_id', 'project'])
        dataset.dataset_id = 'movie'  # mock.Mock() treats `name` specially.
        table_name = 'teebull-neem'
        metadata = self._call_fut(
            source_format, [full_name], dataset.project,
            dataset.dataset_id, table_name)

        expected = {
            'configuration': {
                'load': {
                    'sourceFormat': source_format,
                    'destinationTable': {
                        'projectId': dataset.project,
                        'datasetId': dataset.dataset_id,
                        'tableId': table_name,
                    },
                    'schema': {
                        'fields': [
                            {
                                'name': full_name.name,
                                'type': full_name.field_type,
                                'mode': full_name.mode,
                            },
                        ],
                    },
                },
            },
        }
        self.assertEqual(metadata, expected)


class _Client(object):

    _query_results = ()
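
As an aside, the "mock.Mock() treats `name` specially" comments in the removed tests refer to a quirk of the mock library: a `name` keyword passed to the constructor names the mock itself (for use in repr()) rather than becoming an attribute, so attributes that would collide with it must be assigned after construction. A standalone sketch of that quirk, independent of this commit:

import mock  # the standalone package these tests use; unittest.mock behaves the same

# `name` passed to the constructor becomes the mock's display name,
# not a plain attribute holding the string.
m = mock.Mock(name='market')
assert not isinstance(m.name, str)  # m.name is a child Mock, not 'market'

# Other keyword arguments do become attributes, and attributes can also be
# assigned afterwards, which is the pattern the removed tests follow.
dataset = mock.Mock(project='prediction', spec=['dataset_id', 'project'])
dataset.dataset_id = 'market'
assert dataset.project == 'prediction'
assert dataset.dataset_id == 'market'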