#
# This file is licensed under the Affero General Public License (AGPL) version 3.
#
# Copyright (C) 2023 New Vector, Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# See the GNU Affero General Public License for more details:
# <https://www.gnu.org/licenses/agpl-3.0.html>.
#
# Originally licensed under the Apache License, Version 2.0:
# <http://www.apache.org/licenses/LICENSE-2.0>.
#
# [This file includes modifications made by New Vector Limited]
#
#
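
# Tests for the structured JSON log formatters (TerseJsonFormatter and
# JsonFormatter) and the request/context metadata they attach to each log line.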
import json
import logging
from io import BytesIO, StringIO
from typing import cast
from unittest.mock import Mock, patch

from twisted.web.http import HTTPChannel
from twisted.web.server import Request

from synapse.http.site import SynapseRequest
from synapse.logging._terse_json import JsonFormatter, TerseJsonFormatter
from synapse.logging.context import LoggingContext, LoggingContextFilter
from synapse.types import JsonDict

from tests.logging import LoggerCleanupMixin
from tests.server import FakeChannel, get_clock
from tests.unittest import TestCase

class TerseJsonTestCase(LoggerCleanupMixin, TestCase):
    def setUp(self) -> None:
        self.output = StringIO()
        self.reactor, _ = get_clock()

    def get_log_line(self) -> JsonDict:
        # One log message, with a single trailing newline.
        data = self.output.getvalue()
        logs = data.splitlines()
        self.assertEqual(len(logs), 1)
        self.assertEqual(data.count("\n"), 1)
        return json.loads(logs[0])

    def test_terse_json_output(self) -> None:
        """
        The Terse JSON formatter converts log messages to JSON.
        """
        handler = logging.StreamHandler(self.output)
        handler.setFormatter(TerseJsonFormatter())
        logger = self.get_logger(handler)

        logger.info("Hello there, %s!", "wally")

        log = self.get_log_line()

        # The terse logger should give us these keys.
        expected_log_keys = [
            "log",
            "time",
            "level",
            "namespace",
        ]
        self.assertCountEqual(log.keys(), expected_log_keys)
        self.assertEqual(log["log"], "Hello there, wally!")

    def test_extra_data(self) -> None:
        """
        Additional information can be included in the structured logging.
        """
        handler = logging.StreamHandler(self.output)
        handler.setFormatter(TerseJsonFormatter())
        logger = self.get_logger(handler)

        logger.info(
            "Hello there, %s!", "wally", extra={"foo": "bar", "int": 3, "bool": True}
        )

        log = self.get_log_line()

        # The terse logger should give us these keys.
        expected_log_keys = [
            "log",
            "time",
            "level",
            "namespace",
            # The additional keys given via extra.
            "foo",
            "int",
            "bool",
        ]
        self.assertCountEqual(log.keys(), expected_log_keys)

        # Check the values of the extra fields.
        self.assertEqual(log["foo"], "bar")
        self.assertEqual(log["int"], 3)
        self.assertIs(log["bool"], True)

    def test_json_output(self) -> None:
        """
        The JSON formatter converts log messages to JSON.
        """
        handler = logging.StreamHandler(self.output)
        handler.setFormatter(JsonFormatter())
        logger = self.get_logger(handler)

        logger.info("Hello there, %s!", "wally")

        log = self.get_log_line()

        # The JSON logger should give us these keys.
        expected_log_keys = [
            "log",
            "level",
            "namespace",
        ]
        self.assertCountEqual(log.keys(), expected_log_keys)
        self.assertEqual(log["log"], "Hello there, wally!")

    def test_with_context(self) -> None:
        """
        The logging context should be added to the JSON response.
        """
        handler = logging.StreamHandler(self.output)
        handler.setFormatter(JsonFormatter())
        handler.addFilter(LoggingContextFilter())
        logger = self.get_logger(handler)

        with LoggingContext("name"):
            logger.info("Hello there, %s!", "wally")

        log = self.get_log_line()

        # The terse logger should give us these keys.
        expected_log_keys = [
            "log",
            "level",
            "namespace",
            "request",
        ]
        self.assertCountEqual(log.keys(), expected_log_keys)
        self.assertEqual(log["log"], "Hello there, wally!")
        self.assertEqual(log["request"], "name")

    def test_with_request_context(self) -> None:
        """
        Information from the logging context request should be added to the JSON response.
        """
        handler = logging.StreamHandler(self.output)
        handler.setFormatter(JsonFormatter())
        handler.addFilter(LoggingContextFilter())
        logger = self.get_logger(handler)

        # A full request isn't needed here.
        site = Mock(spec=["site_tag", "server_version_string", "getResourceFor"])
        site.site_tag = "test-site"
        site.server_version_string = "Server v1"
        site.reactor = Mock()
        site.experimental_cors_msc3886 = False
        request = SynapseRequest(
            cast(HTTPChannel, FakeChannel(site, self.reactor)), site
        )
        # Call requestReceived to finish instantiating the object.
        request.content = BytesIO()
        # Partially skip some internal processing of SynapseRequest.
        request._started_processing = Mock()  # type: ignore[method-assign]
        request.request_metrics = Mock(spec=["name"])
        with patch.object(Request, "render"):
            request.requestReceived(b"POST", b"/_matrix/client/versions", b"1.1")

        # Also set the requester to ensure the processing works.
        request.requester = "@foo:test"

        with LoggingContext(
            request.get_request_id(), parent_context=request.logcontext
        ):
            logger.info("Hello there, %s!", "wally")

        log = self.get_log_line()

        # The terse logger includes additional request information, if possible.
        expected_log_keys = [
            "log",
            "level",
            "namespace",
            "request",
            "ip_address",
            "site_tag",
            "requester",
            "authenticated_entity",
            "method",
            "url",
            "protocol",
            "user_agent",
        ]
        self.assertCountEqual(log.keys(), expected_log_keys)
        self.assertEqual(log["log"], "Hello there, wally!")
        self.assertTrue(log["request"].startswith("POST-"))
        self.assertEqual(log["ip_address"], "127.0.0.1")
        self.assertEqual(log["site_tag"], "test-site")
        self.assertEqual(log["requester"], "@foo:test")
        self.assertEqual(log["authenticated_entity"], "@foo:test")
        self.assertEqual(log["method"], "POST")
        self.assertEqual(log["url"], "/_matrix/client/versions")
        self.assertEqual(log["protocol"], "1.1")
        self.assertEqual(log["user_agent"], "")

    def test_with_exception(self) -> None:
        """
        The logging exception type & value should be added to the JSON response.
        """
        handler = logging.StreamHandler(self.output)
        handler.setFormatter(JsonFormatter())
        logger = self.get_logger(handler)

        try:
            raise ValueError("That's wrong, you wally!")
        except ValueError:
            logger.exception("Hello there, %s!", "wally")

        log = self.get_log_line()

        # The terse logger should give us these keys.
        expected_log_keys = [
            "log",
            "level",
            "namespace",
            "exc_type",
            "exc_value",
        ]
        self.assertCountEqual(log.keys(), expected_log_keys)
        self.assertEqual(log["log"], "Hello there, wally!")
        self.assertEqual(log["exc_type"], "ValueError")
        self.assertEqual(log["exc_value"], "That's wrong, you wally!")