diff --git a/logging/google/cloud/logging/_gax.py b/logging/google/cloud/logging/_gax.py index e2f048fbd54f..d1e6196bbebb 100644 --- a/logging/google/cloud/logging/_gax.py +++ b/logging/google/cloud/logging/_gax.py @@ -243,6 +243,8 @@ def sink_get(self, project, sink_name): if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(path) raise + # NOTE: LogSink message type does not have an ``Any`` field + # so ``MessageToDict`` can safely be used. return MessageToDict(sink_pb) def sink_update(self, project, sink_name, filter_, destination): @@ -270,11 +272,13 @@ def sink_update(self, project, sink_name, filter_, destination): path = 'projects/%s/sinks/%s' % (project, sink_name) sink_pb = LogSink(name=path, filter=filter_, destination=destination) try: - self._gax_api.update_sink(path, sink_pb, options=options) + sink_pb = self._gax_api.update_sink(path, sink_pb, options=options) except GaxError as exc: if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(path) raise + # NOTE: LogSink message type does not have an ``Any`` field + # so ``MessageToDict`` can safely be used. return MessageToDict(sink_pb) def sink_delete(self, project, sink_name): @@ -391,6 +395,8 @@ def metric_get(self, project, metric_name): if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(path) raise + # NOTE: LogMetric message type does not have an ``Any`` field + # so ``MessageToDict`` can safely be used. 
return MessageToDict(metric_pb) def metric_update(self, project, metric_name, filter_, description): @@ -418,11 +424,14 @@ def metric_update(self, project, metric_name, filter_, description): metric_pb = LogMetric(name=path, filter=filter_, description=description) try: - self._gax_api.update_log_metric(path, metric_pb, options=options) + metric_pb = self._gax_api.update_log_metric( + path, metric_pb, options=options) except GaxError as exc: if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(path) raise + # NOTE: LogMetric message type does not have an ``Any`` field + # so ``MessageToDict`` can safely be used. return MessageToDict(metric_pb) def metric_delete(self, project, metric_name): @@ -444,6 +453,35 @@ def metric_delete(self, project, metric_name): raise +def _parse_log_entry(entry_pb): + """Special helper to parse ``LogEntry`` protobuf into a dictionary. + + The ``proto_payload`` field in ``LogEntry`` is of type ``Any``. This + can be problematic if the type URL in the payload isn't in the + ``google.protobuf`` registry. To help with parsing unregistered types, + this function will remove ``proto_payload`` before parsing. + + :type entry_pb: :class:`.log_entry_pb2.LogEntry` + :param entry_pb: Log entry protobuf. + + :rtype: dict + :returns: The parsed log entry. The ``protoPayload`` key may contain + the raw ``Any`` protobuf from ``entry_pb.proto_payload`` if + it could not be parsed. + """ + try: + return MessageToDict(entry_pb) + except TypeError: + if entry_pb.HasField('proto_payload'): + proto_payload = entry_pb.proto_payload + entry_pb.ClearField('proto_payload') + entry_mapping = MessageToDict(entry_pb) + entry_mapping['protoPayload'] = proto_payload + return entry_mapping + else: + raise + + def _log_entry_mapping_to_pb(mapping): """Helper for :meth:`write_entries`, et aliae @@ -451,6 +489,13 @@ def _log_entry_mapping_to_pb(mapping): the keys expected in the JSON API. 
""" entry_pb = LogEntry() + # NOTE: We assume ``mapping`` was created in ``Batch.commit`` + # or ``Logger._make_entry_resource``. In either case, if + # the ``protoPayload`` key is present, we assume that the + # type URL is registered with ``google.protobuf`` and will + # not cause any issues in the JSON->protobuf conversion + # of the corresponding ``proto_payload`` in the log entry + # (it is an ``Any`` field). ParseDict(mapping, entry_pb) return entry_pb @@ -482,7 +527,7 @@ def _item_to_entry(iterator, entry_pb, loggers): :rtype: :class:`~google.cloud.logging.entries._BaseEntry` :returns: The next log entry in the page. """ - resource = MessageToDict(entry_pb) + resource = _parse_log_entry(entry_pb) return entry_from_resource(resource, iterator.client, loggers) @@ -499,6 +544,8 @@ def _item_to_sink(iterator, log_sink_pb): :rtype: :class:`~google.cloud.logging.sink.Sink` :returns: The next sink in the page. """ + # NOTE: LogSink message type does not have an ``Any`` field + # so `MessageToDict`` can safely be used. resource = MessageToDict(log_sink_pb) return Sink.from_api_repr(resource, iterator.client) @@ -516,6 +563,8 @@ def _item_to_metric(iterator, log_metric_pb): :rtype: :class:`~google.cloud.logging.metric.Metric` :returns: The next metric in the page. """ + # NOTE: LogMetric message type does not have an ``Any`` field + # so `MessageToDict`` can safely be used. resource = MessageToDict(log_metric_pb) return Metric.from_api_repr(resource, iterator.client) diff --git a/logging/google/cloud/logging/_http.py b/logging/google/cloud/logging/_http.py index d9e7e4dacacd..0838e7fe42ac 100644 --- a/logging/google/cloud/logging/_http.py +++ b/logging/google/cloud/logging/_http.py @@ -286,6 +286,9 @@ def sink_update(self, project, sink_name, filter_, destination): :type destination: str :param destination: destination URI for the entries exported by the sink. + + :rtype: dict + :returns: The returned (updated) resource. 
""" target = '/projects/%s/sinks/%s' % (project, sink_name) data = { @@ -293,7 +296,7 @@ def sink_update(self, project, sink_name, filter_, destination): 'filter': filter_, 'destination': destination, } - self.api_request(method='PUT', path=target, data=data) + return self.api_request(method='PUT', path=target, data=data) def sink_delete(self, project, sink_name): """API call: delete a sink resource. @@ -421,6 +424,9 @@ def metric_update(self, project, metric_name, filter_, description): :type description: str :param description: description of the metric. + + :rtype: dict + :returns: The returned (updated) resource. """ target = '/projects/%s/metrics/%s' % (project, metric_name) data = { @@ -428,7 +434,7 @@ def metric_update(self, project, metric_name, filter_, description): 'filter': filter_, 'description': description, } - self.api_request(method='PUT', path=target, data=data) + return self.api_request(method='PUT', path=target, data=data) def metric_delete(self, project, metric_name): """API call: delete a metric resource. diff --git a/logging/google/cloud/logging/entries.py b/logging/google/cloud/logging/entries.py index 1ae5d34ec8b9..284562c5de5b 100644 --- a/logging/google/cloud/logging/entries.py +++ b/logging/google/cloud/logging/entries.py @@ -17,6 +17,7 @@ import json import re +from google.protobuf import any_pb2 from google.protobuf.json_format import Parse from google.cloud._helpers import _name_from_project_path @@ -47,7 +48,7 @@ def logger_name_from_path(path): class _BaseEntry(object): - """Base class for TextEntry, StructEntry. + """Base class for TextEntry, StructEntry, ProtobufEntry. :type payload: text or dict :param payload: The payload passed as ``textPayload``, ``jsonPayload``, @@ -99,7 +100,7 @@ def from_api_repr(cls, resource, client, loggers=None): (Optional) A mapping of logger fullnames -> loggers. If not passed, the entry will have a newly-created logger. 
- :rtype: :class:`google.cloud.logging.entries.TextEntry` + :rtype: :class:`google.cloud.logging.entries._BaseEntry` :returns: Text entry parsed from ``resource``. """ if loggers is None: @@ -144,9 +145,45 @@ class ProtobufEntry(_BaseEntry): See: https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry + + :type payload: str, dict or any_pb2.Any + :param payload: The payload passed as ``textPayload``, ``jsonPayload``, + or ``protoPayload``. This also may be passed as a raw + :class:`.any_pb2.Any` if the ``protoPayload`` could + not be deserialized. + + :type logger: :class:`~google.cloud.logging.logger.Logger` + :param logger: the logger used to write the entry. + + :type insert_id: str + :param insert_id: (optional) the ID used to identify an entry uniquely. + + :type timestamp: :class:`datetime.datetime` + :param timestamp: (optional) timestamp for the entry + + :type labels: dict + :param labels: (optional) mapping of labels for the entry + + :type severity: str + :param severity: (optional) severity of event being logged. + + :type http_request: dict + :param http_request: (optional) info about HTTP request associated with + the entry """ _PAYLOAD_KEY = 'protoPayload' + def __init__(self, payload, logger, insert_id=None, timestamp=None, + labels=None, severity=None, http_request=None): + super(ProtobufEntry, self).__init__( + payload, logger, insert_id=insert_id, timestamp=timestamp, + labels=labels, severity=severity, http_request=http_request) + if isinstance(self.payload, any_pb2.Any): + self.payload_pb = self.payload + self.payload = None + else: + self.payload_pb = None + def parse_message(self, message): """Parse payload into a protobuf message. 
@@ -155,4 +192,7 @@ def parse_message(self, message): :type message: Protobuf message :param message: the message to be logged """ + # NOTE: This assumes that ``payload`` is already a deserialized + # ``Any`` field and ``message`` has come from an imported + # ``pb2`` module with the relevant protobuf message type. Parse(json.dumps(self.payload), message) diff --git a/logging/google/cloud/logging/logger.py b/logging/google/cloud/logging/logger.py index 459647bbea67..f093e6e48c88 100644 --- a/logging/google/cloud/logging/logger.py +++ b/logging/google/cloud/logging/logger.py @@ -14,9 +14,7 @@ """Define API Loggers.""" -import json - -from google.protobuf.json_format import MessageToJson +from google.protobuf.json_format import MessageToDict from google.cloud._helpers import _datetime_to_rfc3339 @@ -106,24 +104,24 @@ def _make_entry_resource(self, text=None, info=None, message=None, :type info: dict :param info: (Optional) struct payload - :type message: Protobuf message or :class:`NoneType` - :param message: protobuf payload + :type message: :class:`~google.protobuf.message.Message` + :param message: (Optional) The protobuf payload to log. :type labels: dict :param labels: (Optional) labels passed in to calling method. :type insert_id: str - :param insert_id: (optional) unique ID for log entry. + :param insert_id: (Optional) unique ID for log entry. :type severity: str - :param severity: (optional) severity of event being logged. + :param severity: (Optional) severity of event being logged. :type http_request: dict - :param http_request: (optional) info about HTTP request associated with + :param http_request: (Optional) info about HTTP request associated with the entry :type timestamp: :class:`datetime.datetime` - :param timestamp: (optional) timestamp of event being logged. + :param timestamp: (Optional) timestamp of event being logged. :rtype: dict :returns: The JSON resource created. 
@@ -140,9 +138,13 @@ def _make_entry_resource(self, text=None, info=None, message=None, resource['jsonPayload'] = info if message is not None: - as_json_str = MessageToJson(message) - as_json = json.loads(as_json_str) - resource['protoPayload'] = as_json + # NOTE: If ``message`` contains an ``Any`` field with an + # unknown type, this will fail with a ``TypeError``. + # However, since ``message`` will be provided by a user, + # the assumption is that any types needed for the + # protobuf->JSON conversion will be known from already + # imported ``pb2`` modules. + resource['protoPayload'] = MessageToDict(message) if labels is None: labels = self.labels @@ -245,8 +247,8 @@ def log_proto(self, message, client=None, labels=None, insert_id=None, See: https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list - :type message: Protobuf message - :param message: the message to be logged + :type message: :class:`~google.protobuf.message.Message` + :param message: The protobuf message to be logged. :type client: :class:`~google.cloud.logging.client.Client` or ``NoneType`` @@ -462,9 +464,13 @@ def commit(self, client=None): elif entry_type == 'struct': info = {'jsonPayload': entry} elif entry_type == 'proto': - as_json_str = MessageToJson(entry) - as_json = json.loads(as_json_str) - info = {'protoPayload': as_json} + # NOTE: If ``entry`` contains an ``Any`` field with an + # unknown type, this will fail with a ``TypeError``. + # However, since ``entry`` was provided by a user in + # ``Batch.log_proto``, the assumption is that any types + # needed for the protobuf->JSON conversion will be known + # from already imported ``pb2`` modules. 
+ info = {'protoPayload': MessageToDict(entry)} else: raise ValueError('Unknown entry type: %s' % (entry_type,)) if labels is not None: diff --git a/logging/tests/system.py b/logging/tests/system.py index f564ec3dcbdf..3bd8d5fb2139 100644 --- a/logging/tests/system.py +++ b/logging/tests/system.py @@ -108,6 +108,7 @@ class TestLogging(unittest.TestCase): 'precipitation': False, }, } + TYPE_FILTER = 'protoPayload.@type = "{}"' def setUp(self): self.to_delete = [] @@ -123,6 +124,31 @@ def tearDown(self): def _logger_name(): return 'system-tests-logger' + unique_resource_id('-') + def test_list_entry_with_unregistered(self): + from google.protobuf import any_pb2 + from google.protobuf import descriptor_pool + from google.cloud.logging import entries + + pool = descriptor_pool.Default() + type_name = 'google.cloud.audit.AuditLog' + # Make sure the descriptor is not known in the registry. + with self.assertRaises(KeyError): + pool.FindMessageTypeByName(type_name) + + type_url = 'type.googleapis.com/' + type_name + filter_ = self.TYPE_FILTER.format(type_url) + entry_iter = iter( + Config.CLIENT.list_entries(page_size=1, filter_=filter_)) + protobuf_entry = next(entry_iter) + self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) + if Config.CLIENT._use_grpc: + self.assertIsNone(protobuf_entry.payload) + self.assertIsInstance(protobuf_entry.payload_pb, any_pb2.Any) + self.assertEqual(protobuf_entry.payload_pb.type_url, type_url) + else: + self.assertIsNone(protobuf_entry.payload_pb) + self.assertEqual(protobuf_entry.payload['@type'], type_url) + def test_log_text(self): TEXT_PAYLOAD = 'System test: test_log_text' logger = Config.CLIENT.logger(self._logger_name()) diff --git a/logging/tests/unit/test__gax.py b/logging/tests/unit/test__gax.py index 67830a99db42..57041097efac 100644 --- a/logging/tests/unit/test__gax.py +++ b/logging/tests/unit/test__gax.py @@ -1100,6 +1100,172 @@ def test_metric_delete_hit(self): self.assertIsNone(options) 
+@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') +class Test__parse_log_entry(unittest.TestCase): + + @staticmethod + def _call_fut(*args, **kwargs): + from google.cloud.logging._gax import _parse_log_entry + + return _parse_log_entry(*args, **kwargs) + + def test_simple(self): + from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry + + entry_pb = LogEntry(log_name=u'lol-jk', text_payload=u'bah humbug') + result = self._call_fut(entry_pb) + expected = { + 'logName': entry_pb.log_name, + 'textPayload': entry_pb.text_payload, + } + self.assertEqual(result, expected) + + @mock.patch('google.cloud.logging._gax.MessageToDict', + side_effect=TypeError) + def test_non_registry_failure(self, msg_to_dict_mock): + entry_pb = mock.Mock(spec=['HasField']) + entry_pb.HasField.return_value = False + with self.assertRaises(TypeError): + self._call_fut(entry_pb) + + entry_pb.HasField.assert_called_once_with('proto_payload') + msg_to_dict_mock.assert_called_once_with(entry_pb) + + def test_unregistered_type(self): + from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry + from google.protobuf import any_pb2 + from google.protobuf import descriptor_pool + from google.protobuf.timestamp_pb2 import Timestamp + + pool = descriptor_pool.Default() + type_name = 'google.bigtable.admin.v2.UpdateClusterMetadata' + # Make sure the descriptor is not known in the registry. 
+ with self.assertRaises(KeyError): + pool.FindMessageTypeByName(type_name) + + type_url = 'type.googleapis.com/' + type_name + metadata_bytes = ( + b'\n\n\n\x03foo\x12\x03bar\x12\x06\x08\xbd\xb6\xfb\xc6\x05') + any_pb = any_pb2.Any(type_url=type_url, value=metadata_bytes) + timestamp = Timestamp(seconds=61, nanos=1234000) + + entry_pb = LogEntry(proto_payload=any_pb, timestamp=timestamp) + result = self._call_fut(entry_pb) + expected = { + 'protoPayload': any_pb, + 'timestamp': '1970-01-01T00:01:01.001234Z', + } + self.assertEqual(result, expected) + + def test_registered_type(self): + from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry + from google.protobuf import any_pb2 + from google.protobuf import descriptor_pool + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + + pool = descriptor_pool.Default() + type_name = 'google.protobuf.Struct' + # Make sure the descriptor is known in the registry. + descriptor = pool.FindMessageTypeByName(type_name) + self.assertEqual(descriptor.name, 'Struct') + + type_url = 'type.googleapis.com/' + type_name + field_name = 'foo' + field_value = u'Bar' + struct_pb = Struct( + fields={field_name: Value(string_value=field_value)}) + any_pb = any_pb2.Any( + type_url=type_url, + value=struct_pb.SerializeToString(), + ) + + entry_pb = LogEntry(proto_payload=any_pb, log_name=u'all-good') + result = self._call_fut(entry_pb) + expected_proto = { + 'logName': entry_pb.log_name, + 'protoPayload': { + '@type': type_url, + 'value': {field_name: field_value}, + }, + } + self.assertEqual(result, expected_proto) + + +@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') +class Test__log_entry_mapping_to_pb(unittest.TestCase): + + @staticmethod + def _call_fut(*args, **kwargs): + from google.cloud.logging._gax import _log_entry_mapping_to_pb + + return _log_entry_mapping_to_pb(*args, **kwargs) + + def test_simple(self): + from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry + + 
result = self._call_fut({}) + self.assertEqual(result, LogEntry()) + + def test_unregistered_type(self): + from google.protobuf import descriptor_pool + from google.protobuf.json_format import ParseError + + pool = descriptor_pool.Default() + type_name = 'google.bigtable.admin.v2.UpdateClusterMetadata' + # Make sure the descriptor is not known in the registry. + with self.assertRaises(KeyError): + pool.FindMessageTypeByName(type_name) + + type_url = 'type.googleapis.com/' + type_name + json_mapping = { + 'protoPayload': { + '@type': type_url, + 'originalRequest': { + 'name': 'foo', + 'location': 'bar', + }, + 'requestTime': { + 'seconds': 1491000125, + }, + }, + } + with self.assertRaises(ParseError): + self._call_fut(json_mapping) + + def test_registered_type(self): + from google.cloud.proto.logging.v2.log_entry_pb2 import LogEntry + from google.protobuf import any_pb2 + from google.protobuf import descriptor_pool + + pool = descriptor_pool.Default() + type_name = 'google.protobuf.Struct' + # Make sure the descriptor is known in the registry. + descriptor = pool.FindMessageTypeByName(type_name) + self.assertEqual(descriptor.name, 'Struct') + + type_url = 'type.googleapis.com/' + type_name + field_name = 'foo' + field_value = u'Bar' + json_mapping = { + 'logName': u'hi-everybody', + 'protoPayload': { + '@type': type_url, + 'value': {field_name: field_value}, + }, + } + # Convert to a valid LogEntry. 
+ result = self._call_fut(json_mapping) + entry_pb = LogEntry( + log_name=json_mapping['logName'], + proto_payload=any_pb2.Any( + type_url=type_url, + value=b'\n\014\n\003foo\022\005\032\003Bar', + ), + ) + self.assertEqual(result, entry_pb) + + @unittest.skipUnless(_HAVE_GRPC, 'No gax-python') class Test_make_gax_logging_api(unittest.TestCase): diff --git a/logging/tests/unit/test_entries.py b/logging/tests/unit/test_entries.py index d39d72a27af8..4d254eb9d1ef 100644 --- a/logging/tests/unit/test_entries.py +++ b/logging/tests/unit/test_entries.py @@ -14,6 +14,8 @@ import unittest +import mock + class Test_logger_name_from_path(unittest.TestCase): @@ -207,6 +209,32 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) + def test_constructor_basic(self): + payload = {'foo': 'bar'} + pb_entry = self._make_one(payload, mock.sentinel.logger) + self.assertEqual(pb_entry.payload, payload) + self.assertIsNone(pb_entry.payload_pb) + self.assertIs(pb_entry.logger, mock.sentinel.logger) + self.assertIsNone(pb_entry.insert_id) + self.assertIsNone(pb_entry.timestamp) + self.assertIsNone(pb_entry.labels) + self.assertIsNone(pb_entry.severity) + self.assertIsNone(pb_entry.http_request) + + def test_constructor_with_any(self): + from google.protobuf.any_pb2 import Any + + payload = Any() + pb_entry = self._make_one(payload, mock.sentinel.logger) + self.assertIs(pb_entry.payload_pb, payload) + self.assertIsNone(pb_entry.payload) + self.assertIs(pb_entry.logger, mock.sentinel.logger) + self.assertIsNone(pb_entry.insert_id) + self.assertIsNone(pb_entry.timestamp) + self.assertIsNone(pb_entry.labels) + self.assertIsNone(pb_entry.severity) + self.assertIsNone(pb_entry.http_request) + def test_parse_message(self): import json from google.protobuf.json_format import MessageToJson