86 changes: 52 additions & 34 deletions gcloud/logging/_gax.py
@@ -32,6 +32,8 @@
 from gcloud.exceptions import Conflict
 from gcloud.exceptions import NotFound
 from gcloud._helpers import _datetime_to_pb_timestamp
+from gcloud._helpers import _datetime_to_rfc3339
+from gcloud._helpers import _pb_timestamp_to_datetime


 class _LoggingAPI(object):
@@ -397,6 +399,22 @@ def _build_paging_options(page_token=None):
     return CallOptions(**options)


+def _mon_resource_pb_to_mapping(resource_pb):
+    """Helper for :func:`_log_entry_pb_to_mapping`"""
+    mapping = {
+        'type': resource_pb.type,
+    }
+    if resource_pb.labels:
+        mapping['labels'] = resource_pb.labels
+    return mapping
+
+
+def _pb_timestamp_to_rfc3339(timestamp_pb):
+    """Helper for :func:`_log_entry_pb_to_mapping`"""
+    timestamp = _pb_timestamp_to_datetime(timestamp_pb)
+    return _datetime_to_rfc3339(timestamp)
+
+
 def _log_entry_pb_to_mapping(entry_pb):
     """Helper for :meth:`list_entries`, et aliae

@@ -405,20 +423,20 @@ def _log_entry_pb_to_mapping(entry_pb):
     https://github.com/google/protobuf/issues/1351
     """
     mapping = {
-        'log_name': entry_pb.log_name,
-        'resource': entry_pb.resource,
+        'logName': entry_pb.log_name,
+        'resource': _mon_resource_pb_to_mapping(entry_pb.resource),
         'severity': entry_pb.severity,
-        'insert_id': entry_pb.insert_id,
-        'timestamp': entry_pb.timestamp,
+        'insertId': entry_pb.insert_id,
+        'timestamp': _pb_timestamp_to_rfc3339(entry_pb.timestamp),
         'labels': entry_pb.labels,
-        'text_payload': entry_pb.text_payload,
-        'json_payload': entry_pb.json_payload,
-        'proto_payload': entry_pb.proto_payload,
+        'textPayload': entry_pb.text_payload,
+        'jsonPayload': entry_pb.json_payload,
+        'protoPayload': entry_pb.proto_payload,
     }

     if entry_pb.http_request:
         request = entry_pb.http_request
-        mapping['http_request'] = {
+        mapping['httpRequest'] = {
             'request_method': request.request_method,
             'request_url': request.request_url,
             'status': request.status,
@@ -444,20 +462,20 @@

 def _http_request_mapping_to_pb(info, request):
     """Helper for _log_entry_mapping_to_pb"""
-    optional_request_keys = (
-        'request_method',
-        'request_url',
-        'status',
-        'referer',
-        'user_agent',
-        'cache_hit',
-        'request_size',
-        'response_size',
-        'remote_ip',
-    )
-    for key in optional_request_keys:
+    optional_request_keys = {
+        'requestMethod': 'request_method',
+        'requestUrl': 'request_url',
+        'status': 'status',
+        'referer': 'referer',
+        'userAgent': 'user_agent',
+        'cacheHit': 'cache_hit',
+        'requestSize': 'request_size',
+        'responseSize': 'response_size',
+        'remoteIp': 'remote_ip',
+    }
+    for key, pb_name in optional_request_keys.items():
         if key in info:
-            setattr(request, key, info[key])
+            setattr(request, pb_name, info[key])


 def _log_operation_mapping_to_pb(info, operation):
@@ -482,15 +500,15 @@ def _log_entry_mapping_to_pb(mapping):
     # pylint: disable=too-many-branches
     entry_pb = LogEntry()

-    optional_scalar_keys = (
-        'log_name',
-        'insert_id',
-        'text_payload',
-    )
+    optional_scalar_keys = {
+        'logName': 'log_name',
+        'insertId': 'insert_id',
+        'textPayload': 'text_payload',
+    }

-    for key in optional_scalar_keys:
+    for key, pb_name in optional_scalar_keys.items():
         if key in mapping:
-            setattr(entry_pb, key, mapping[key])
+            setattr(entry_pb, pb_name, mapping[key])

     if 'resource' in mapping:
         entry_pb.resource.type = mapping['resource']['type']
@@ -509,16 +527,16 @@ def _log_entry_mapping_to_pb(mapping):
         for key, value in mapping['labels'].items():
             entry_pb.labels[key] = value

-    if 'json_payload' in mapping:
-        for key, value in mapping['json_payload'].items():
+    if 'jsonPayload' in mapping:
+        for key, value in mapping['jsonPayload'].items():
             entry_pb.json_payload[key] = value

-    if 'proto_payload' in mapping:
-        Parse(json.dumps(mapping['proto_payload']), entry_pb.proto_payload)
+    if 'protoPayload' in mapping:
+        Parse(json.dumps(mapping['protoPayload']), entry_pb.proto_payload)

-    if 'http_request' in mapping:
+    if 'httpRequest' in mapping:
         _http_request_mapping_to_pb(
-            mapping['http_request'], entry_pb.http_request)
+            mapping['httpRequest'], entry_pb.http_request)

     if 'operation' in mapping:
         _log_operation_mapping_to_pb(
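Taken together, the _gax.py changes make _log_entry_pb_to_mapping emit the camelCase keys used by the v2 REST API, delegating resource and timestamp conversion to the new helpers. A minimal usage sketch of the pb-to-mapping path (illustrative only, not part of this PR; it assumes the generated google.logging.v2 classes are importable, as the module above already requires):

    # Hypothetical sketch, not from the PR: exercise the pb -> mapping path.
    from google.logging.v2.log_entry_pb2 import LogEntry

    from gcloud.logging._gax import _log_entry_pb_to_mapping

    entry_pb = LogEntry()
    entry_pb.log_name = 'projects/PROJECT/logs/LOG'
    entry_pb.resource.type = 'global'
    entry_pb.text_payload = 'hello'

    mapping = _log_entry_pb_to_mapping(entry_pb)
    assert mapping['logName'] == 'projects/PROJECT/logs/LOG'   # camelCase key
    assert mapping['textPayload'] == 'hello'
    assert mapping['resource'] == {'type': 'global'}   # via _mon_resource_pb_to_mapping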
80 changes: 49 additions & 31 deletions gcloud/logging/test__gax.py
@@ -64,9 +64,9 @@ def test_list_entries_no_paging(self):
         self.assertEqual(len(entries), 1)
         entry = entries[0]
         self.assertIsInstance(entry, dict)
-        self.assertEqual(entry['log_name'], self.LOG_NAME)
+        self.assertEqual(entry['logName'], self.LOG_NAME)
         self.assertEqual(entry['resource'], {'type': 'global'})
-        self.assertEqual(entry['text_payload'], TEXT)
+        self.assertEqual(entry['textPayload'], TEXT)
         self.assertEqual(next_token, TOKEN)

         projects, filter_, order_by, page_size, options = (
@@ -94,9 +94,9 @@ def test_list_entries_with_paging(self):
         self.assertEqual(len(entries), 1)
         entry = entries[0]
         self.assertIsInstance(entry, dict)
-        self.assertEqual(entry['log_name'], self.LOG_NAME)
+        self.assertEqual(entry['logName'], self.LOG_NAME)
         self.assertEqual(entry['resource'], {'type': 'global'})
-        self.assertEqual(entry['json_payload'], PAYLOAD)
+        self.assertEqual(entry['jsonPayload'], PAYLOAD)
         self.assertEqual(next_token, NEW_TOKEN)

         projects, filter_, order_by, page_size, options = (
@@ -108,7 +108,12 @@ def test_list_entries_with_paging(self):
         self.assertEqual(options.page_token, TOKEN)

     def test_list_entries_with_extra_properties(self):
+        from datetime import datetime
         from gcloud._testing import _GAXPageIterator
+        from gcloud._helpers import UTC
+        from gcloud._helpers import _datetime_to_rfc3339
+        from gcloud._helpers import _datetime_to_pb_timestamp
+        NOW = datetime.utcnow().replace(tzinfo=UTC)
         SIZE = 23
         TOKEN = 'TOKEN'
         NEW_TOKEN = 'NEW_TOKEN'
@@ -128,6 +133,8 @@ def test_list_entries_with_extra_properties(self):
             'operation': operation,
         }
         ENTRY = _LogEntryPB(self.LOG_NAME, proto_payload=PAYLOAD, **EXTRAS)
+        ENTRY.resource.labels['foo'] = 'bar'
+        ENTRY.timestamp = _datetime_to_pb_timestamp(NOW)
         response = _GAXPageIterator([ENTRY], NEW_TOKEN)
         gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
         api = self._makeOne(gax_api)
@@ -138,12 +145,14 @@ def test_list_entries_with_extra_properties(self):
         self.assertEqual(len(entries), 1)
         entry = entries[0]
         self.assertIsInstance(entry, dict)
-        self.assertEqual(entry['log_name'], self.LOG_NAME)
-        self.assertEqual(entry['resource'], {'type': 'global'})
-        self.assertEqual(entry['proto_payload'], PAYLOAD)
+        self.assertEqual(entry['logName'], self.LOG_NAME)
+        self.assertEqual(entry['resource'],
+                         {'type': 'global', 'labels': {'foo': 'bar'}})
+        self.assertEqual(entry['protoPayload'], PAYLOAD)
         self.assertEqual(entry['severity'], SEVERITY)
         self.assertEqual(entry['labels'], LABELS)
-        self.assertEqual(entry['insert_id'], IID)
+        self.assertEqual(entry['insertId'], IID)
+        self.assertEqual(entry['timestamp'], _datetime_to_rfc3339(NOW))
         EXPECTED_REQUEST = {
             'request_method': request.request_method,
             'request_url': request.request_url,
@@ -155,7 +164,7 @@ def test_list_entries_with_extra_properties(self):
             'remote_ip': request.remote_ip,
             'cache_hit': request.cache_hit,
         }
-        self.assertEqual(entry['http_request'], EXPECTED_REQUEST)
+        self.assertEqual(entry['httpRequest'], EXPECTED_REQUEST)
         EXPECTED_OPERATION = {
             'producer': operation.producer,
             'id': operation.id,
@@ -178,9 +187,9 @@ def test_write_entries_single(self):
         TEXT = 'TEXT'
         LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME)
         ENTRY = {
-            'log_name': LOG_PATH,
+            'logName': LOG_PATH,
             'resource': {'type': 'global'},
-            'text_payload': TEXT,
+            'textPayload': TEXT,
         }
         gax_api = _GAXLoggingAPI()
         api = self._makeOne(gax_api)
@@ -227,15 +236,15 @@ def test_write_entries_w_extra_properties(self):
         USER_AGENT = 'Agent/1.0'
         REMOTE_IP = '1.2.3.4'
         REQUEST = {
-            'request_method': REQUEST_METHOD,
-            'request_url': REQUEST_URL,
+            'requestMethod': REQUEST_METHOD,
+            'requestUrl': REQUEST_URL,
             'status': STATUS,
-            'request_size': REQUEST_SIZE,
-            'response_size': RESPONSE_SIZE,
+            'requestSize': REQUEST_SIZE,
+            'responseSize': RESPONSE_SIZE,
             'referer': REFERRER_URL,
-            'user_agent': USER_AGENT,
-            'remote_ip': REMOTE_IP,
-            'cache_hit': False,
+            'userAgent': USER_AGENT,
+            'remoteIp': REMOTE_IP,
+            'cacheHit': False,
         }
         PRODUCER = 'PRODUCER'
         OPID = 'OPID'
@@ -246,14 +255,14 @@
             'last': True,
         }
         ENTRY = {
-            'log_name': LOG_PATH,
+            'logName': LOG_PATH,
             'resource': {'type': 'global'},
-            'text_payload': TEXT,
+            'textPayload': TEXT,
             'severity': SEVERITY,
             'labels': LABELS,
-            'insert_id': IID,
+            'insertId': IID,
             'timestamp': NOW,
-            'http_request': REQUEST,
+            'httpRequest': REQUEST,
             'operation': OPERATION,
         }
         gax_api = _GAXLoggingAPI()
@@ -302,28 +311,30 @@ def test_write_entries_w_extra_properties(self):

     def test_write_entries_multiple(self):
         # pylint: disable=too-many-statements
+        import datetime
         from google.logging.type.log_severity_pb2 import WARNING
         from google.logging.v2.log_entry_pb2 import LogEntry
         from google.protobuf.any_pb2 import Any
         from google.protobuf.struct_pb2 import Struct
+        from gcloud._helpers import _datetime_to_rfc3339, UTC
         TEXT = 'TEXT'
-        TIMESTAMP = _LogEntryPB._make_timestamp()
+        NOW = datetime.datetime.utcnow().replace(tzinfo=UTC)
         TIMESTAMP_TYPE_URL = 'type.googleapis.com/google.protobuf.Timestamp'
         JSON = {'payload': 'PAYLOAD', 'type': 'json'}
        PROTO = {
             '@type': TIMESTAMP_TYPE_URL,
-            'value': TIMESTAMP,
+            'value': _datetime_to_rfc3339(NOW),
         }
         PRODUCER = 'PRODUCER'
         OPID = 'OPID'
         URL = 'http://example.com/'
         ENTRIES = [
-            {'text_payload': TEXT,
+            {'textPayload': TEXT,
              'severity': WARNING},
-            {'json_payload': JSON,
+            {'jsonPayload': JSON,
              'operation': {'producer': PRODUCER, 'id': OPID}},
-            {'proto_payload': PROTO,
-             'http_request': {'request_url': URL}},
+            {'protoPayload': PROTO,
+             'httpRequest': {'requestUrl': URL}},
         ]
         LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME)
         RESOURCE = {
@@ -956,6 +967,13 @@ class _LogEntryOperationPB(object):
     id = 'OPID'


+class _ResourcePB(object):
+
+    def __init__(self, type_='global', **labels):
+        self.type = type_
+        self.labels = labels
+
+
 class _LogEntryPB(object):

     severity = 'DEFAULT'
@@ -964,7 +982,7 @@ class _LogEntryPB(object):

     def __init__(self, log_name, **kw):
         self.log_name = log_name
-        self.resource = {'type': 'global'}
+        self.resource = _ResourcePB()
         self.timestamp = self._make_timestamp()
         self.labels = kw.pop('labels', {})
         self.__dict__.update(kw)
@@ -973,9 +991,9 @@ def __init__(self, log_name, **kw):
     def _make_timestamp():
         from datetime import datetime
         from gcloud._helpers import UTC
-        from gcloud.logging.test_entries import _datetime_to_rfc3339_w_nanos
+        from gcloud._helpers import _datetime_to_pb_timestamp
         NOW = datetime.utcnow().replace(tzinfo=UTC)
-        return _datetime_to_rfc3339_w_nanos(NOW)
+        return _datetime_to_pb_timestamp(NOW)


 class _LogSinkPB(object):
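The test changes mirror the same rename in the opposite direction. A companion sketch (again illustrative, not from the PR) of feeding a camelCase mapping through _log_entry_mapping_to_pb, the path the write_entries tests exercise:

    # Hypothetical sketch, not from the PR: exercise the mapping -> pb path.
    from gcloud.logging._gax import _log_entry_mapping_to_pb

    mapping = {
        'logName': 'projects/PROJECT/logs/LOG',
        'resource': {'type': 'global'},
        'textPayload': 'hello',
        'httpRequest': {'requestMethod': 'GET', 'status': 200},
    }
    entry_pb = _log_entry_mapping_to_pb(mapping)
    # camelCase keys translate back to snake_case protobuf fields.
    assert entry_pb.log_name == 'projects/PROJECT/logs/LOG'
    assert entry_pb.text_payload == 'hello'
    assert entry_pb.http_request.request_method == 'GET'
    assert entry_pb.http_request.status == 200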