Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 18 additions & 5 deletions gcloud/_helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -452,24 +452,37 @@ def _bytes_to_unicode(value):
raise ValueError('%r could not be converted to unicode' % (value,))


def _pb_timestamp_to_datetime(timestamp_pb):
    """Convert a Timestamp protobuf to a datetime object.

    :type timestamp_pb: :class:`google.protobuf.timestamp_pb2.Timestamp`
    :param timestamp_pb: A Google returned timestamp protobuf.

    :rtype: :class:`datetime.datetime`
    :returns: A UTC datetime object converted from a protobuf timestamp.
    """
    # Protobuf timestamps carry seconds + nanoseconds since the epoch;
    # timedelta only supports microsecond resolution, so nanos are scaled
    # (with true division) and sub-microsecond precision is dropped.
    return (
        _EPOCH +
        datetime.timedelta(
            seconds=timestamp_pb.seconds,
            microseconds=(timestamp_pb.nanos / 1000.0),
        )
    )


def _pb_timestamp_to_rfc3339(timestamp_pb):
    """Render a Timestamp protobuf as an RFC 3339 string.

    :type timestamp_pb: :class:`google.protobuf.timestamp_pb2.Timestamp`
    :param timestamp_pb: A Google returned timestamp protobuf.

    :rtype: string
    :returns: An RFC 3339 formatted timestamp string.
    """
    # Convert through a UTC datetime, then format it.
    return _datetime_to_rfc3339(_pb_timestamp_to_datetime(timestamp_pb))


def _datetime_to_pb_timestamp(when):
"""Convert a datetime object to a Timestamp protobuf.

Expand Down
28 changes: 28 additions & 0 deletions gcloud/_testing.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,34 @@ def __exit__(self, exc_type, exc_val, exc_tb):
os.remove(self.name)


class _GAXBaseAPI(object):

_random_gax_error = False

def __init__(self, **kw):
self.__dict__.update(kw)

def _make_grpc_error(self, status_code):
from grpc.framework.interfaces.face.face import AbortionError

class _DummyException(AbortionError):
code = status_code

def __init__(self):
super(_DummyException, self).__init__(
None, None, self.code, None)

return _DummyException()

def _make_grpc_not_found(self):
from grpc.beta.interfaces import StatusCode
return self._make_grpc_error(StatusCode.NOT_FOUND)

def _make_grpc_failed_precondition(self):
from grpc.beta.interfaces import StatusCode
return self._make_grpc_error(StatusCode.FAILED_PRECONDITION)


class _GAXPageIterator(object):

def __init__(self, items, page_token):
Expand Down
79 changes: 40 additions & 39 deletions gcloud/logging/_gax.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,7 @@
from gcloud.exceptions import Conflict
from gcloud.exceptions import NotFound
from gcloud._helpers import _datetime_to_pb_timestamp
from gcloud._helpers import _datetime_to_rfc3339
from gcloud._helpers import _pb_timestamp_to_datetime
from gcloud._helpers import _pb_timestamp_to_rfc3339


class _LoggingAPI(object):
Expand All @@ -45,7 +44,7 @@ def __init__(self, gax_api):
self._gax_api = gax_api

def list_entries(self, projects, filter_='', order_by='',
page_size=0, page_token=None):
page_size=0, page_token=INITIAL_PAGE):
"""Return a page of log entry resources.

:type projects: list of strings
Expand Down Expand Up @@ -74,7 +73,7 @@ def list_entries(self, projects, filter_='', order_by='',
if not None, indicates that more entries can be retrieved
with another call (pass that value as ``page_token``).
"""
options = _build_paging_options(page_token)
options = CallOptions(page_token=page_token)
page_iter = self._gax_api.list_log_entries(
projects, filter_, order_by, page_size, options)
entries = [_log_entry_pb_to_mapping(entry_pb)
Expand Down Expand Up @@ -136,7 +135,7 @@ class _SinksAPI(object):
def __init__(self, gax_api):
self._gax_api = gax_api

def list_sinks(self, project, page_size=0, page_token=None):
def list_sinks(self, project, page_size=0, page_token=INITIAL_PAGE):
"""List sinks for the project associated with this client.

:type project: string
Expand All @@ -156,7 +155,7 @@ def list_sinks(self, project, page_size=0, page_token=None):
if not None, indicates that more sinks can be retrieved
with another call (pass that value as ``page_token``).
"""
options = _build_paging_options(page_token)
options = CallOptions(page_token=page_token)
path = 'projects/%s' % (project,)
page_iter = self._gax_api.list_sinks(path, page_size, options)
sinks = [_log_sink_pb_to_mapping(log_sink_pb)
Expand Down Expand Up @@ -280,7 +279,7 @@ class _MetricsAPI(object):
def __init__(self, gax_api):
self._gax_api = gax_api

def list_metrics(self, project, page_size=0, page_token=None):
def list_metrics(self, project, page_size=0, page_token=INITIAL_PAGE):
"""List metrics for the project associated with this client.

:type project: string
Expand All @@ -300,7 +299,7 @@ def list_metrics(self, project, page_size=0, page_token=None):
if not None, indicates that more metrics can be retrieved
with another call (pass that value as ``page_token``).
"""
options = _build_paging_options(page_token)
options = CallOptions(page_token=page_token)
path = 'projects/%s' % (project,)
page_iter = self._gax_api.list_log_metrics(path, page_size, options)
metrics = [_log_metric_pb_to_mapping(log_metric_pb)
Expand Down Expand Up @@ -413,16 +412,12 @@ def metric_delete(self, project, metric_name):
raise


def _build_paging_options(page_token=None):
    """Helper for :meth:`_PublisherAPI.list_topics` et aliae.

    :type page_token: string or ``NoneType``
    :param page_token: token identifying the page to fetch; ``None``
                       requests the first page.

    :rtype: :class:`google.gax.CallOptions`
    :returns: call options with the page token applied.
    """
    if page_token is None:
        page_token = INITIAL_PAGE
    return CallOptions(page_token=page_token)


def _mon_resource_pb_to_mapping(resource_pb):
"""Helper for :func:_log_entry_pb_to_mapping"""
"""Helper for :func:_log_entry_pb_to_mapping`.

Performs "impedance matching" between the protobuf attrs and the keys
expected in the JSON API.
"""

This comment was marked as spam.

This comment was marked as spam.

mapping = {
'type': resource_pb.type,
}
Expand All @@ -431,14 +426,12 @@ def _mon_resource_pb_to_mapping(resource_pb):
return mapping


def _pb_timestamp_to_rfc3339(timestamp_pb):
    """Helper for :func:`_log_entry_pb_to_mapping`.

    Converts the protobuf timestamp to a UTC datetime, then formats that
    datetime as an RFC 3339 string.
    """
    return _datetime_to_rfc3339(_pb_timestamp_to_datetime(timestamp_pb))


def _value_pb_to_value(value_pb):
"""Helper for :func:`_log_entry_pb_to_mapping`."""
"""Helper for :func:`_log_entry_pb_to_mapping`.

Performs "impedance matching" between the protobuf attrs and the keys
expected in the JSON API.
"""
kind = value_pb.WhichOneof('kind')

if kind is None:
Expand Down Expand Up @@ -467,17 +460,20 @@ def _value_pb_to_value(value_pb):


def _struct_pb_to_mapping(struct_pb):
"""Helper for :func:`_log_entry_pb_to_mapping`."""
"""Helper for :func:`_log_entry_pb_to_mapping`.

Performs "impedance matching" between the protobuf attrs and the keys
expected in the JSON API.
"""
return dict([(key, _value_pb_to_value(struct_pb.fields[key]))
for key in struct_pb.fields])


def _log_entry_pb_to_mapping(entry_pb):
"""Helper for :meth:`list_entries`, et aliae

Ideally, would use a function from :mod:`protobuf.json_format`, but
the right one isn't public. See:
https://github.com/google/protobuf/issues/1351
Performs "impedance matching" between the protobuf attrs and the keys
expected in the JSON API.
"""
mapping = {
'logName': entry_pb.log_name,
Expand Down Expand Up @@ -523,7 +519,11 @@ def _log_entry_pb_to_mapping(entry_pb):


def _http_request_mapping_to_pb(info, request):
"""Helper for _log_entry_mapping_to_pb"""
"""Helper for _log_entry_mapping_to_pb

Performs "impedance matching" between the protobuf attrs and the keys
expected in the JSON API.
"""
optional_request_keys = {
'requestMethod': 'request_method',
'requestUrl': 'request_url',
Expand All @@ -541,7 +541,11 @@ def _http_request_mapping_to_pb(info, request):


def _log_operation_mapping_to_pb(info, operation):
"""Helper for _log_entry_mapping_to_pb"""
"""Helper for _log_entry_mapping_to_pb

Performs "impedance matching" between the protobuf attrs and the keys
expected in the JSON API.
"""
operation.producer = info['producer']
operation.id = info['id']

Expand All @@ -555,9 +559,8 @@ def _log_operation_mapping_to_pb(info, operation):
def _log_entry_mapping_to_pb(mapping):
"""Helper for :meth:`write_entries`, et aliae

Ideally, would use a function from :mod:`protobuf.json_format`, but
the right one isn't public. See:
https://github.com/google/protobuf/issues/1351
Performs "impedance matching" between the protobuf attrs and the keys
expected in the JSON API.
"""
# pylint: disable=too-many-branches
entry_pb = LogEntry()
Expand Down Expand Up @@ -611,9 +614,8 @@ def _log_entry_mapping_to_pb(mapping):
def _log_sink_pb_to_mapping(sink_pb):
"""Helper for :meth:`list_sinks`, et aliae

Ideally, would use a function from :mod:`protobuf.json_format`, but
the right one isn't public. See:
https://github.com/google/protobuf/issues/1351
Performs "impedance matching" between the protobuf attrs and the keys
expected in the JSON API.
"""
return {
'name': sink_pb.name,
Expand All @@ -625,9 +627,8 @@ def _log_sink_pb_to_mapping(sink_pb):
def _log_metric_pb_to_mapping(metric_pb):
"""Helper for :meth:`list_metrics`, et aliae

Ideally, would use a function from :mod:`protobuf.json_format`, but
the right one isn't public. See:
https://github.com/google/protobuf/issues/1351
Performs "impedance matching" between the protobuf attrs and the keys
expected in the JSON API.
"""
return {
'name': metric_pb.name,
Expand Down
30 changes: 2 additions & 28 deletions gcloud/logging/test__gax.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@
else:
_HAVE_GAX = True

from gcloud._testing import _GAXBaseAPI


class _Base(object):
PROJECT = 'PROJECT'
Expand Down Expand Up @@ -910,34 +912,6 @@ def WhichOneof(self, name):
self._callFUT(_Value())


class _GAXBaseAPI(object):

_random_gax_error = False

def __init__(self, **kw):
self.__dict__.update(kw)

def _make_grpc_error(self, status_code):
from grpc.framework.interfaces.face.face import AbortionError

class _DummyException(AbortionError):
code = status_code

def __init__(self):
super(_DummyException, self).__init__(
None, None, self.code, None)

return _DummyException()

def _make_grpc_not_found(self):
from grpc.beta.interfaces import StatusCode
return self._make_grpc_error(StatusCode.NOT_FOUND)

def _make_grpc_failed_precondition(self):
from grpc.beta.interfaces import StatusCode
return self._make_grpc_error(StatusCode.FAILED_PRECONDITION)


class _GAXLoggingAPI(_GAXBaseAPI):

_delete_not_found = False
Expand Down
Loading