Commit f6d654f

Merge pull request #1892 from tseaver/logging-gax_cleanups
_LoggingAPI: impedance matching with JSON API.
2 parents: eb3c076 + be36a7d
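
For context: the GAX transport works with protobuf LogEntry messages whose fields are snake_case, while the rest of the logging client (written against the JSON/REST API) expects camelCase keys such as logName, insertId, and textPayload. This commit renames the keys at the _gax.py boundary so both transports hand back the same shape. A minimal, self-contained sketch of that output direction, with made-up names rather than the library's code:

    # Hypothetical stand-ins; the real code converts LogEntry protobuf messages.
    class FakeEntryPB(object):
        """Stand-in for a LogEntry protobuf with snake_case fields."""

        def __init__(self, log_name, insert_id, text_payload):
            self.log_name = log_name
            self.insert_id = insert_id
            self.text_payload = text_payload


    def entry_pb_to_json_mapping(entry_pb):
        """Rename snake_case protobuf fields to JSON-API camelCase keys."""
        return {
            'logName': entry_pb.log_name,
            'insertId': entry_pb.insert_id,
            'textPayload': entry_pb.text_payload,
        }


    entry = FakeEntryPB('projects/p/logs/mylog', 'IID-1', 'hello')
    assert entry_pb_to_json_mapping(entry) == {
        'logName': 'projects/p/logs/mylog',
        'insertId': 'IID-1',
        'textPayload': 'hello',
    }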

2 files changed: +101 −65

gcloud/logging/_gax.py

Lines changed: 52 additions & 34 deletions
@@ -32,6 +32,8 @@
 from gcloud.exceptions import Conflict
 from gcloud.exceptions import NotFound
 from gcloud._helpers import _datetime_to_pb_timestamp
+from gcloud._helpers import _datetime_to_rfc3339
+from gcloud._helpers import _pb_timestamp_to_datetime


 class _LoggingAPI(object):
@@ -397,6 +399,22 @@ def _build_paging_options(page_token=None):
     return CallOptions(**options)


+def _mon_resource_pb_to_mapping(resource_pb):
+    """Helper for :func:`_log_entry_pb_to_mapping`"""
+    mapping = {
+        'type': resource_pb.type,
+    }
+    if resource_pb.labels:
+        mapping['labels'] = resource_pb.labels
+    return mapping
+
+
+def _pb_timestamp_to_rfc3339(timestamp_pb):
+    """Helper for :func:`_log_entry_pb_to_mapping`"""
+    timestamp = _pb_timestamp_to_datetime(timestamp_pb)
+    return _datetime_to_rfc3339(timestamp)
+
+
 def _log_entry_pb_to_mapping(entry_pb):
     """Helper for :meth:`list_entries`, et aliae

@@ -405,20 +423,20 @@ def _log_entry_pb_to_mapping(entry_pb):
     https://github.com/google/protobuf/issues/1351
     """
     mapping = {
-        'log_name': entry_pb.log_name,
-        'resource': entry_pb.resource,
+        'logName': entry_pb.log_name,
+        'resource': _mon_resource_pb_to_mapping(entry_pb.resource),
         'severity': entry_pb.severity,
-        'insert_id': entry_pb.insert_id,
-        'timestamp': entry_pb.timestamp,
+        'insertId': entry_pb.insert_id,
+        'timestamp': _pb_timestamp_to_rfc3339(entry_pb.timestamp),
         'labels': entry_pb.labels,
-        'text_payload': entry_pb.text_payload,
-        'json_payload': entry_pb.json_payload,
-        'proto_payload': entry_pb.proto_payload,
+        'textPayload': entry_pb.text_payload,
+        'jsonPayload': entry_pb.json_payload,
+        'protoPayload': entry_pb.proto_payload,
     }

     if entry_pb.http_request:
         request = entry_pb.http_request
-        mapping['http_request'] = {
+        mapping['httpRequest'] = {
             'request_method': request.request_method,
             'request_url': request.request_url,
             'status': request.status,
@@ -444,20 +462,20 @@ def _log_entry_pb_to_mapping(entry_pb):

 def _http_request_mapping_to_pb(info, request):
     """Helper for _log_entry_mapping_to_pb"""
-    optional_request_keys = (
-        'request_method',
-        'request_url',
-        'status',
-        'referer',
-        'user_agent',
-        'cache_hit',
-        'request_size',
-        'response_size',
-        'remote_ip',
-    )
-    for key in optional_request_keys:
+    optional_request_keys = {
+        'requestMethod': 'request_method',
+        'requestUrl': 'request_url',
+        'status': 'status',
+        'referer': 'referer',
+        'userAgent': 'user_agent',
+        'cacheHit': 'cache_hit',
+        'requestSize': 'request_size',
+        'responseSize': 'response_size',
+        'remoteIp': 'remote_ip',
+    }
+    for key, pb_name in optional_request_keys.items():
         if key in info:
-            setattr(request, key, info[key])
+            setattr(request, pb_name, info[key])


 def _log_operation_mapping_to_pb(info, operation):
@@ -482,15 +500,15 @@ def _log_entry_mapping_to_pb(mapping):
     # pylint: disable=too-many-branches
     entry_pb = LogEntry()

-    optional_scalar_keys = (
-        'log_name',
-        'insert_id',
-        'text_payload',
-    )
+    optional_scalar_keys = {
+        'logName': 'log_name',
+        'insertId': 'insert_id',
+        'textPayload': 'text_payload',
+    }

-    for key in optional_scalar_keys:
+    for key, pb_name in optional_scalar_keys.items():
         if key in mapping:
-            setattr(entry_pb, key, mapping[key])
+            setattr(entry_pb, pb_name, mapping[key])

     if 'resource' in mapping:
         entry_pb.resource.type = mapping['resource']['type']
@@ -509,16 +527,16 @@ def _log_entry_mapping_to_pb(mapping):
         for key, value in mapping['labels'].items():
             entry_pb.labels[key] = value

-    if 'json_payload' in mapping:
-        for key, value in mapping['json_payload'].items():
+    if 'jsonPayload' in mapping:
+        for key, value in mapping['jsonPayload'].items():
             entry_pb.json_payload[key] = value

-    if 'proto_payload' in mapping:
-        Parse(json.dumps(mapping['proto_payload']), entry_pb.proto_payload)
+    if 'protoPayload' in mapping:
+        Parse(json.dumps(mapping['protoPayload']), entry_pb.proto_payload)

-    if 'http_request' in mapping:
+    if 'httpRequest' in mapping:
         _http_request_mapping_to_pb(
-            mapping['http_request'], entry_pb.http_request)
+            mapping['httpRequest'], entry_pb.http_request)

     if 'operation' in mapping:
         _log_operation_mapping_to_pb(
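
The write path goes the other way: callers hand in mappings with JSON-style camelCase keys, and the converter must set snake_case attributes on the protobuf message. The commit swaps the old tuples of key names for dicts mapping each JSON key to its protobuf attribute name, so absent keys are simply skipped and no key has to match both spellings. A rough sketch of that pattern, again with made-up names rather than the library's code:

    # JSON-API key -> protobuf attribute name, mirroring optional_request_keys.
    JSON_KEY_TO_PB_ATTR = {
        'requestMethod': 'request_method',
        'requestUrl': 'request_url',
        'userAgent': 'user_agent',
    }


    class FakeHttpRequestPB(object):
        """Stand-in for the HttpRequest protobuf message."""
        request_method = request_url = user_agent = None


    def copy_json_keys_to_pb(info, request_pb):
        """Copy any JSON-style keys present in ``info`` onto pb attributes."""
        for json_key, pb_name in JSON_KEY_TO_PB_ATTR.items():
            if json_key in info:
                setattr(request_pb, pb_name, info[json_key])


    request_pb = FakeHttpRequestPB()
    copy_json_keys_to_pb(
        {'requestMethod': 'GET', 'requestUrl': 'http://example.com/'},
        request_pb)
    assert request_pb.request_method == 'GET'
    assert request_pb.user_agent is None   # absent keys are left untouched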

gcloud/logging/test__gax.py

Lines changed: 49 additions & 31 deletions
@@ -64,9 +64,9 @@ def test_list_entries_no_paging(self):
         self.assertEqual(len(entries), 1)
         entry = entries[0]
         self.assertIsInstance(entry, dict)
-        self.assertEqual(entry['log_name'], self.LOG_NAME)
+        self.assertEqual(entry['logName'], self.LOG_NAME)
         self.assertEqual(entry['resource'], {'type': 'global'})
-        self.assertEqual(entry['text_payload'], TEXT)
+        self.assertEqual(entry['textPayload'], TEXT)
         self.assertEqual(next_token, TOKEN)

         projects, filter_, order_by, page_size, options = (
@@ -94,9 +94,9 @@ def test_list_entries_with_paging(self):
         self.assertEqual(len(entries), 1)
         entry = entries[0]
         self.assertIsInstance(entry, dict)
-        self.assertEqual(entry['log_name'], self.LOG_NAME)
+        self.assertEqual(entry['logName'], self.LOG_NAME)
         self.assertEqual(entry['resource'], {'type': 'global'})
-        self.assertEqual(entry['json_payload'], PAYLOAD)
+        self.assertEqual(entry['jsonPayload'], PAYLOAD)
         self.assertEqual(next_token, NEW_TOKEN)

         projects, filter_, order_by, page_size, options = (
@@ -108,7 +108,12 @@ def test_list_entries_with_paging(self):
         self.assertEqual(options.page_token, TOKEN)

     def test_list_entries_with_extra_properties(self):
+        from datetime import datetime
         from gcloud._testing import _GAXPageIterator
+        from gcloud._helpers import UTC
+        from gcloud._helpers import _datetime_to_rfc3339
+        from gcloud._helpers import _datetime_to_pb_timestamp
+        NOW = datetime.utcnow().replace(tzinfo=UTC)
         SIZE = 23
         TOKEN = 'TOKEN'
         NEW_TOKEN = 'NEW_TOKEN'
@@ -128,6 +133,8 @@ def test_list_entries_with_extra_properties(self):
             'operation': operation,
         }
         ENTRY = _LogEntryPB(self.LOG_NAME, proto_payload=PAYLOAD, **EXTRAS)
+        ENTRY.resource.labels['foo'] = 'bar'
+        ENTRY.timestamp = _datetime_to_pb_timestamp(NOW)
         response = _GAXPageIterator([ENTRY], NEW_TOKEN)
         gax_api = _GAXLoggingAPI(_list_log_entries_response=response)
         api = self._makeOne(gax_api)
@@ -138,12 +145,14 @@ def test_list_entries_with_extra_properties(self):
         self.assertEqual(len(entries), 1)
         entry = entries[0]
         self.assertIsInstance(entry, dict)
-        self.assertEqual(entry['log_name'], self.LOG_NAME)
-        self.assertEqual(entry['resource'], {'type': 'global'})
-        self.assertEqual(entry['proto_payload'], PAYLOAD)
+        self.assertEqual(entry['logName'], self.LOG_NAME)
+        self.assertEqual(entry['resource'],
+                         {'type': 'global', 'labels': {'foo': 'bar'}})
+        self.assertEqual(entry['protoPayload'], PAYLOAD)
         self.assertEqual(entry['severity'], SEVERITY)
         self.assertEqual(entry['labels'], LABELS)
-        self.assertEqual(entry['insert_id'], IID)
+        self.assertEqual(entry['insertId'], IID)
+        self.assertEqual(entry['timestamp'], _datetime_to_rfc3339(NOW))
         EXPECTED_REQUEST = {
             'request_method': request.request_method,
             'request_url': request.request_url,
@@ -155,7 +164,7 @@ def test_list_entries_with_extra_properties(self):
             'remote_ip': request.remote_ip,
             'cache_hit': request.cache_hit,
         }
-        self.assertEqual(entry['http_request'], EXPECTED_REQUEST)
+        self.assertEqual(entry['httpRequest'], EXPECTED_REQUEST)
         EXPECTED_OPERATION = {
             'producer': operation.producer,
             'id': operation.id,
@@ -178,9 +187,9 @@ def test_write_entries_single(self):
         TEXT = 'TEXT'
         LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME)
         ENTRY = {
-            'log_name': LOG_PATH,
+            'logName': LOG_PATH,
             'resource': {'type': 'global'},
-            'text_payload': TEXT,
+            'textPayload': TEXT,
         }
         gax_api = _GAXLoggingAPI()
         api = self._makeOne(gax_api)
@@ -227,15 +236,15 @@ def test_write_entries_w_extra_properties(self):
         USER_AGENT = 'Agent/1.0'
         REMOTE_IP = '1.2.3.4'
         REQUEST = {
-            'request_method': REQUEST_METHOD,
-            'request_url': REQUEST_URL,
+            'requestMethod': REQUEST_METHOD,
+            'requestUrl': REQUEST_URL,
             'status': STATUS,
-            'request_size': REQUEST_SIZE,
-            'response_size': RESPONSE_SIZE,
+            'requestSize': REQUEST_SIZE,
+            'responseSize': RESPONSE_SIZE,
             'referer': REFERRER_URL,
-            'user_agent': USER_AGENT,
-            'remote_ip': REMOTE_IP,
-            'cache_hit': False,
+            'userAgent': USER_AGENT,
+            'remoteIp': REMOTE_IP,
+            'cacheHit': False,
         }
         PRODUCER = 'PRODUCER'
         OPID = 'OPID'
@@ -246,14 +255,14 @@ def test_write_entries_w_extra_properties(self):
             'last': True,
         }
         ENTRY = {
-            'log_name': LOG_PATH,
+            'logName': LOG_PATH,
             'resource': {'type': 'global'},
-            'text_payload': TEXT,
+            'textPayload': TEXT,
             'severity': SEVERITY,
             'labels': LABELS,
-            'insert_id': IID,
+            'insertId': IID,
             'timestamp': NOW,
-            'http_request': REQUEST,
+            'httpRequest': REQUEST,
             'operation': OPERATION,
         }
         gax_api = _GAXLoggingAPI()
@@ -302,28 +311,30 @@ def test_write_entries_w_extra_properties(self):

     def test_write_entries_multiple(self):
         # pylint: disable=too-many-statements
+        import datetime
         from google.logging.type.log_severity_pb2 import WARNING
         from google.logging.v2.log_entry_pb2 import LogEntry
         from google.protobuf.any_pb2 import Any
         from google.protobuf.struct_pb2 import Struct
+        from gcloud._helpers import _datetime_to_rfc3339, UTC
         TEXT = 'TEXT'
-        TIMESTAMP = _LogEntryPB._make_timestamp()
+        NOW = datetime.datetime.utcnow().replace(tzinfo=UTC)
         TIMESTAMP_TYPE_URL = 'type.googleapis.com/google.protobuf.Timestamp'
         JSON = {'payload': 'PAYLOAD', 'type': 'json'}
         PROTO = {
             '@type': TIMESTAMP_TYPE_URL,
-            'value': TIMESTAMP,
+            'value': _datetime_to_rfc3339(NOW),
         }
         PRODUCER = 'PRODUCER'
         OPID = 'OPID'
         URL = 'http://example.com/'
         ENTRIES = [
-            {'text_payload': TEXT,
+            {'textPayload': TEXT,
              'severity': WARNING},
-            {'json_payload': JSON,
+            {'jsonPayload': JSON,
              'operation': {'producer': PRODUCER, 'id': OPID}},
-            {'proto_payload': PROTO,
-             'http_request': {'request_url': URL}},
+            {'protoPayload': PROTO,
+             'httpRequest': {'requestUrl': URL}},
         ]
         LOG_PATH = 'projects/%s/logs/%s' % (self.PROJECT, self.LOG_NAME)
         RESOURCE = {
@@ -956,6 +967,13 @@ class _LogEntryOperationPB(object):
     id = 'OPID'


+class _ResourcePB(object):
+
+    def __init__(self, type_='global', **labels):
+        self.type = type_
+        self.labels = labels
+
+
 class _LogEntryPB(object):

     severity = 'DEFAULT'
@@ -964,7 +982,7 @@ class _LogEntryPB(object):

     def __init__(self, log_name, **kw):
         self.log_name = log_name
-        self.resource = {'type': 'global'}
+        self.resource = _ResourcePB()
         self.timestamp = self._make_timestamp()
         self.labels = kw.pop('labels', {})
         self.__dict__.update(kw)
@@ -973,9 +991,9 @@ def __init__(self, log_name, **kw):
     def _make_timestamp():
         from datetime import datetime
         from gcloud._helpers import UTC
-        from gcloud.logging.test_entries import _datetime_to_rfc3339_w_nanos
+        from gcloud._helpers import _datetime_to_pb_timestamp
         NOW = datetime.utcnow().replace(tzinfo=UTC)
-        return _datetime_to_rfc3339_w_nanos(NOW)
+        return _datetime_to_pb_timestamp(NOW)


 class _LogSinkPB(object):
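
The updated tests also pin down the timestamp handling: the stub entry now carries a protobuf-style timestamp (seconds plus nanos), and list_entries is expected to surface it as the RFC 3339 string the JSON API uses. A self-contained sketch of that round trip, using plain datetime arithmetic rather than the gcloud._helpers functions:

    from datetime import datetime, timedelta

    _EPOCH = datetime(1970, 1, 1)


    def datetime_to_pb_pair(when):
        """datetime -> (seconds, nanos), the protobuf Timestamp shape."""
        delta = when - _EPOCH
        seconds = delta.days * 86400 + delta.seconds
        nanos = delta.microseconds * 1000
        return seconds, nanos


    def pb_pair_to_rfc3339(seconds, nanos):
        """(seconds, nanos) -> RFC 3339 text, e.g. '2016-06-21T01:02:03.000004Z'."""
        when = _EPOCH + timedelta(seconds=seconds, microseconds=nanos // 1000)
        return when.strftime('%Y-%m-%dT%H:%M:%S.%fZ')


    now = datetime.utcnow()
    assert pb_pair_to_rfc3339(*datetime_to_pb_pair(now)) == (
        now.strftime('%Y-%m-%dT%H:%M:%S.%fZ'))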
