Merged
62 changes: 27 additions & 35 deletions bigquery/unit_tests/test_client.py
@@ -14,6 +14,13 @@

import unittest

import mock


def _make_credentials():
import google.auth.credentials
return mock.Mock(spec=google.auth.credentials.Credentials)


class TestClient(unittest.TestCase):

@@ -28,7 +35,7 @@ def _make_one(self, *args, **kw):
def test_ctor(self):
from google.cloud.bigquery._http import Connection
PROJECT = 'PROJECT'
creds = object()
creds = _make_credentials()
http = object()
client = self._make_one(project=PROJECT, credentials=creds, http=http)
self.assertIsInstance(client._connection, Connection)
@@ -57,7 +64,7 @@ def test_list_projects_defaults(self):
'friendlyName': 'Two'},
]
}
creds = object()
creds = _make_credentials()
client = self._make_one(PROJECT_1, creds)
conn = client._connection = _Connection(DATA)

@@ -86,7 +93,7 @@ def test_list_projects_explicit_response_missing_projects_key(self):
PATH = 'projects'
TOKEN = 'TOKEN'
DATA = {}
creds = object()
creds = _make_credentials()
client = self._make_one(PROJECT, creds)
conn = client._connection = _Connection(DATA)

@@ -128,7 +135,7 @@ def test_list_datasets_defaults(self):
'friendlyName': 'Two'},
]
}
creds = object()
creds = _make_credentials()
client = self._make_one(PROJECT, creds)
conn = client._connection = _Connection(DATA)

@@ -156,7 +163,7 @@ def test_list_datasets_explicit_response_missing_datasets_key(self):
PATH = 'projects/%s/datasets' % PROJECT
TOKEN = 'TOKEN'
DATA = {}
creds = object()
creds = _make_credentials()
client = self._make_one(PROJECT, creds)
conn = client._connection = _Connection(DATA)

@@ -176,36 +183,21 @@ def test_list_datasets_explicit_response_missing_datasets_key(self):
self.assertEqual(req['query_params'],
{'all': True, 'maxResults': 3, 'pageToken': TOKEN})

def test_dataset_defaults(self):
def test_dataset(self):
from google.cloud.bigquery.dataset import Dataset
PROJECT = 'PROJECT'
DATASET = 'dataset_name'
creds = object()
creds = _make_credentials()
http = object()
client = self._make_one(project=PROJECT, credentials=creds, http=http)
dataset = client.dataset(DATASET)
self.assertIsInstance(dataset, Dataset)
self.assertEqual(dataset.name, DATASET)
self.assertIs(dataset._client, client)
self.assertEqual(dataset.project, PROJECT)

def test_dataset_explicit(self):
from google.cloud.bigquery.dataset import Dataset
PROJECT = 'my-project-123'
OTHER_PROJECT = 'other-project-456'
DATASET = 'dataset_name'
creds = object()
http = object()
client = self._make_one(project=PROJECT, credentials=creds, http=http)
dataset = client.dataset(DATASET, project=OTHER_PROJECT)
self.assertIsInstance(dataset, Dataset)
self.assertEqual(dataset.name, DATASET)
self.assertIs(dataset._client, client)
self.assertEqual(dataset.project, OTHER_PROJECT)

def test_job_from_resource_unknown_type(self):
PROJECT = 'PROJECT'
creds = object()
creds = _make_credentials()
client = self._make_one(PROJECT, creds)
with self.assertRaises(ValueError):
client.job_from_resource({'configuration': {'nonesuch': {}}})
@@ -319,7 +311,7 @@ def test_list_jobs_defaults(self):
LOAD_DATA,
]
}
creds = object()
creds = _make_credentials()
client = self._make_one(PROJECT, creds)
conn = client._connection = _Connection(DATA)

@@ -375,7 +367,7 @@ def test_list_jobs_load_job_wo_sourceUris(self):
LOAD_DATA,
]
}
creds = object()
creds = _make_credentials()
client = self._make_one(PROJECT, creds)
conn = client._connection = _Connection(DATA)

@@ -403,7 +395,7 @@ def test_list_jobs_explicit_missing(self):
PATH = 'projects/%s/jobs' % PROJECT
DATA = {}
TOKEN = 'TOKEN'
creds = object()
creds = _make_credentials()
client = self._make_one(PROJECT, creds)
conn = client._connection = _Connection(DATA)

@@ -434,7 +426,7 @@ def test_load_table_from_storage(self):
DATASET = 'dataset_name'
DESTINATION = 'destination_table'
SOURCE_URI = 'http://example.com/source.csv'
creds = object()
creds = _make_credentials()
http = object()
client = self._make_one(project=PROJECT, credentials=creds, http=http)
dataset = client.dataset(DATASET)
@@ -453,7 +445,7 @@ def test_copy_table(self):
DATASET = 'dataset_name'
SOURCE = 'source_table'
DESTINATION = 'destination_table'
creds = object()
creds = _make_credentials()
http = object()
client = self._make_one(project=PROJECT, credentials=creds, http=http)
dataset = client.dataset(DATASET)
@@ -473,7 +465,7 @@ def test_extract_table_to_storage(self):
DATASET = 'dataset_name'
SOURCE = 'source_table'
DESTINATION = 'gs://bucket_name/object_name'
creds = object()
creds = _make_credentials()
http = object()
client = self._make_one(project=PROJECT, credentials=creds, http=http)
dataset = client.dataset(DATASET)
@@ -490,7 +482,7 @@ def test_run_async_query_defaults(self):
PROJECT = 'PROJECT'
JOB = 'job_name'
QUERY = 'select count(*) from persons'
creds = object()
creds = _make_credentials()
http = object()
client = self._make_one(project=PROJECT, credentials=creds, http=http)
job = client.run_async_query(JOB, QUERY)
@@ -508,7 +500,7 @@ def test_run_async_w_udf_resources(self):
PROJECT = 'PROJECT'
JOB = 'job_name'
QUERY = 'select count(*) from persons'
creds = object()
creds = _make_credentials()
http = object()
client = self._make_one(project=PROJECT, credentials=creds, http=http)
udf_resources = [UDFResource("resourceUri", RESOURCE_URI)]
@@ -526,7 +518,7 @@ def test_run_async_w_query_parameters(self):
PROJECT = 'PROJECT'
JOB = 'job_name'
QUERY = 'select count(*) from persons'
creds = object()
creds = _make_credentials()
http = object()
client = self._make_one(project=PROJECT, credentials=creds, http=http)
query_parameters = [ScalarQueryParameter('foo', 'INT64', 123)]
@@ -543,7 +535,7 @@ def test_run_sync_query_defaults(self):
from google.cloud.bigquery.query import QueryResults
PROJECT = 'PROJECT'
QUERY = 'select count(*) from persons'
creds = object()
creds = _make_credentials()
http = object()
client = self._make_one(project=PROJECT, credentials=creds, http=http)
query = client.run_sync_query(QUERY)
@@ -560,7 +552,7 @@ def test_run_sync_query_w_udf_resources(self):
RESOURCE_URI = 'gs://some-bucket/js/lib.js'
PROJECT = 'PROJECT'
QUERY = 'select count(*) from persons'
creds = object()
creds = _make_credentials()
http = object()
client = self._make_one(project=PROJECT, credentials=creds, http=http)
udf_resources = [UDFResource("resourceUri", RESOURCE_URI)]
@@ -577,7 +569,7 @@ def test_run_sync_query_w_query_parameters(self):
from google.cloud.bigquery.query import QueryResults
PROJECT = 'PROJECT'
QUERY = 'select count(*) from persons'
creds = object()
creds = _make_credentials()
http = object()
client = self._make_one(project=PROJECT, credentials=creds, http=http)
query_parameters = [ScalarQueryParameter('foo', 'INT64', 123)]
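
Note on the new helper, as a minimal standalone sketch rather than anything taken from this PR: a Mock constrained with spec=google.auth.credentials.Credentials passes isinstance() checks against the Credentials interface and exposes only attributes the real class has, whereas the bare object() the tests previously used does neither. The sketch assumes the google-auth package is installed, uses the stdlib unittest.mock in place of the standalone mock package imported in the diff, and the test class name is invented for illustration.

import unittest
from unittest import mock

import google.auth.credentials


def _make_credentials():
    # A mock constrained to the Credentials spec: isinstance() checks
    # against google.auth.credentials.Credentials succeed, and accessing
    # an attribute the real class lacks raises AttributeError.
    return mock.Mock(spec=google.auth.credentials.Credentials)


class TestCredentialsSketch(unittest.TestCase):

    def test_spec_mock_passes_isinstance(self):
        creds = _make_credentials()
        self.assertIsInstance(creds, google.auth.credentials.Credentials)

    def test_plain_object_does_not(self):
        self.assertNotIsInstance(
            object(), google.auth.credentials.Credentials)


if __name__ == '__main__':
    unittest.main()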