Merge branch 'rav/db_txn_time_in_millis' into rav/track_db_scheduling
Commit 0f5d2cc37c
6 changed files with 49 additions and 31 deletions
@@ -93,6 +93,8 @@ response_db_txn_count = metrics.register_counter(
     ),
 )
 
+# seconds spent waiting for db txns, excluding scheduling time, when processing
+# this request
 response_db_txn_duration = metrics.register_counter(
     "response_db_txn_duration_seconds", labels=["method", "servlet", "tag"],
     alternative_names=(
@@ -377,7 +379,7 @@ class RequestMetrics(object):
             context.db_txn_count, request.method, self.name, tag
         )
         response_db_txn_duration.inc_by(
-            context.db_txn_duration, request.method, self.name, tag
+            context.db_txn_duration_ms / 1000., request.method, self.name, tag
         )
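Note: response_db_txn_duration is registered as response_db_txn_duration_seconds, so the increment site is now responsible for converting the logging context's millisecond total. A minimal sketch of that convention (the Counter class below is illustrative and ignores labels; it is not Synapse's metrics implementation):

# Durations are accumulated in milliseconds on the logging context and divided
# by 1000. only where a *_seconds counter is incremented (illustrative names).
class Counter(object):
    def __init__(self, name):
        self.name = name
        self.total = 0.0

    def inc_by(self, amount):
        self.total += amount

response_db_txn_duration = Counter("response_db_txn_duration_seconds")
db_txn_duration_ms = 2500                                    # accumulated in ms
response_db_txn_duration.inc_by(db_txn_duration_ms / 1000.)  # recorded as 2.5 s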
@@ -66,10 +66,10 @@ class SynapseRequest(Request):
             context = LoggingContext.current_context()
             ru_utime, ru_stime = context.get_resource_usage()
             db_txn_count = context.db_txn_count
-            db_txn_duration = context.db_txn_duration
+            db_txn_duration_ms = context.db_txn_duration_ms
         except Exception:
             ru_utime, ru_stime = (0, 0)
-            db_txn_count, db_txn_duration = (0, 0)
+            db_txn_count, db_txn_duration_ms = (0, 0)
 
         self.site.access_logger.info(
             "%s - %s - {%s}"
@@ -81,7 +81,7 @@ class SynapseRequest(Request):
             int(time.time() * 1000) - self.start_time,
             int(ru_utime * 1000),
             int(ru_stime * 1000),
-            int(db_txn_duration * 1000),
+            db_txn_duration_ms,
             int(db_txn_count),
             self.sentLength,
             self.code,
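The access log now receives the millisecond total directly instead of rebuilding it from a value stored in seconds. A small standalone illustration of how the old seconds round trip could truncate a millisecond (plain Python arithmetic, not Synapse code):

# old pipeline (sketch): ms -> float seconds on the context -> ms again in the log line
duration_ms = 7
db_txn_duration = duration_ms / 1000.    # 0.007, stored as float seconds
logged = int(db_txn_duration * 1000)     # int(6.999999999999999) == 6, not 7

# new pipeline: the context keeps a millisecond total, which is logged as-is
db_txn_duration_ms = duration_ms         # 7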
@@ -52,13 +52,16 @@ except Exception:
 class LoggingContext(object):
     """Additional context for log formatting. Contexts are scoped within a
     "with" block.
 
     Args:
         name (str): Name for the context for debugging.
     """
 
     __slots__ = [
-        "previous_context", "name", "usage_start", "usage_end", "main_thread",
-        "__dict__", "tag", "alive",
+        "previous_context", "name", "ru_stime", "ru_utime",
+        "db_txn_count", "db_txn_duration_ms", "usage_start", "usage_end",
+        "main_thread", "alive",
+        "request", "tag",
     ]
 
     thread_local = threading.local()
@@ -94,9 +97,14 @@ class LoggingContext(object):
         self.ru_stime = 0.
         self.ru_utime = 0.
         self.db_txn_count = 0
-        self.db_txn_duration = 0.
+
+        # ms spent waiting for db txns, excluding scheduling time
+        self.db_txn_duration_ms = 0
+
         self.usage_start = None
         self.usage_end = None
         self.main_thread = threading.current_thread()
+        self.request = None
         self.tag = ""
         self.alive = True
@@ -105,7 +113,11 @@ class LoggingContext(object):
 
     @classmethod
     def current_context(cls):
-        """Get the current logging context from thread local storage"""
+        """Get the current logging context from thread local storage
+
+        Returns:
+            LoggingContext: the current logging context
+        """
         return getattr(cls.thread_local, "current_context", cls.sentinel)
 
     @classmethod
@@ -155,11 +167,13 @@ class LoggingContext(object):
         self.alive = False
 
     def copy_to(self, record):
-        """Copy fields from this context to the record"""
-        for key, value in self.__dict__.items():
-            setattr(record, key, value)
+        """Copy logging fields from this context to a log record or
+        another LoggingContext
+        """
 
-        record.ru_utime, record.ru_stime = self.get_resource_usage()
+        # 'request' is the only field we currently use in the logger, so that's
+        # all we need to copy
+        record.request = self.request
 
     def start(self):
         if threading.current_thread() is not self.main_thread:
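copy_to() now propagates only the request field rather than everything in __dict__ (which is gone from __slots__ above). A hedged sketch of the kind of consumer this is aimed at: a logging filter that stamps each record so "%(request)s" can appear in log formats. ContextRequestFilter is a made-up name; the module's real filter is LoggingContextFilter, shown in the next hunk, and this sketch assumes the sentinel context also provides a no-op copy_to().

import logging

class ContextRequestFilter(logging.Filter):
    def filter(self, record):
        # default if no logging context has set a request
        record.request = None
        # may overwrite record.request with the current context's value;
        # assumes the sentinel context's copy_to() is a no-op
        LoggingContext.current_context().copy_to(record)
        return True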
@@ -194,7 +208,7 @@ class LoggingContext(object):
 
     def add_database_transaction(self, duration_ms):
         self.db_txn_count += 1
-        self.db_txn_duration += duration_ms / 1000.
+        self.db_txn_duration_ms += duration_ms
 
 
 class LoggingContextFilter(logging.Filter):
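add_database_transaction() now expects its argument in milliseconds and stores it without conversion. A minimal sketch of the caller-side contract (the helper name and structure are illustrative; the real call site is in Synapse's database layer):

import time

def run_txn_with_timing(context, run_txn):
    # time a database transaction and report its duration, in milliseconds,
    # to the given logging context
    start = time.time()
    try:
        return run_txn()
    finally:
        duration_ms = (time.time() - start) * 1000.
        context.add_database_transaction(duration_ms)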
@@ -72,6 +72,7 @@ block_db_txn_count = metrics.register_counter(
     ),
 )
 
+# seconds spent waiting for db txns, excluding scheduling time, in this block
 block_db_txn_duration = metrics.register_counter(
     "block_db_txn_duration_seconds", labels=["block_name"],
     alternative_names=(
@@ -95,7 +96,7 @@ def measure_func(name):
 class Measure(object):
     __slots__ = [
         "clock", "name", "start_context", "start", "new_context", "ru_utime",
-        "ru_stime", "db_txn_count", "db_txn_duration", "created_context"
+        "ru_stime", "db_txn_count", "db_txn_duration_ms", "created_context"
     ]
 
     def __init__(self, clock, name):
@@ -115,7 +116,7 @@ class Measure(object):
 
         self.ru_utime, self.ru_stime = self.start_context.get_resource_usage()
         self.db_txn_count = self.start_context.db_txn_count
-        self.db_txn_duration = self.start_context.db_txn_duration
+        self.db_txn_duration_ms = self.start_context.db_txn_duration_ms
 
     def __exit__(self, exc_type, exc_val, exc_tb):
         if isinstance(exc_type, Exception) or not self.start_context:
@@ -145,7 +146,8 @@ class Measure(object):
             context.db_txn_count - self.db_txn_count, self.name
         )
         block_db_txn_duration.inc_by(
-            context.db_txn_duration - self.db_txn_duration, self.name
+            (context.db_txn_duration_ms - self.db_txn_duration_ms) / 1000.,
+            self.name
         )
 
         if self.created_context:
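Measure keeps the same snapshot-on-enter / delta-on-exit pattern; only the unit of the stored snapshot changes. A trimmed, illustrative sketch of that pattern (clock plumbing, error handling and the other counters are omitted):

class MeasureSketch(object):
    def __init__(self, name):
        self.name = name

    def __enter__(self):
        # snapshot the context's running totals on entry
        self.start_context = LoggingContext.current_context()
        self.db_txn_count = self.start_context.db_txn_count
        self.db_txn_duration_ms = self.start_context.db_txn_duration_ms
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # report the difference on exit, converting ms to seconds only at the
        # *_seconds metric boundary
        context = LoggingContext.current_context()
        block_db_txn_count.inc_by(
            context.db_txn_count - self.db_txn_count, self.name
        )
        block_db_txn_duration.inc_by(
            (context.db_txn_duration_ms - self.db_txn_duration_ms) / 1000.,
            self.name,
        )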
@@ -68,7 +68,7 @@ class KeyringTestCase(unittest.TestCase):
 
     def check_context(self, _, expected):
         self.assertEquals(
-            getattr(LoggingContext.current_context(), "test_key", None),
+            getattr(LoggingContext.current_context(), "request", None),
             expected
         )
 
@@ -82,7 +82,7 @@ class KeyringTestCase(unittest.TestCase):
         lookup_2_deferred = defer.Deferred()
 
         with LoggingContext("one") as context_one:
-            context_one.test_key = "one"
+            context_one.request = "one"
 
             wait_1_deferred = kr.wait_for_previous_lookups(
                 ["server1"],
@@ -96,7 +96,7 @@ class KeyringTestCase(unittest.TestCase):
             wait_1_deferred.addBoth(self.check_context, "one")
 
         with LoggingContext("two") as context_two:
-            context_two.test_key = "two"
+            context_two.request = "two"
 
             # set off another wait. It should block because the first lookup
             # hasn't yet completed.
@@ -137,7 +137,7 @@ class KeyringTestCase(unittest.TestCase):
         @defer.inlineCallbacks
         def get_perspectives(**kwargs):
             self.assertEquals(
-                LoggingContext.current_context().test_key, "11",
+                LoggingContext.current_context().request, "11",
             )
             with logcontext.PreserveLoggingContext():
                 yield persp_deferred
@@ -145,7 +145,7 @@ class KeyringTestCase(unittest.TestCase):
         self.http_client.post_json.side_effect = get_perspectives
 
         with LoggingContext("11") as context_11:
-            context_11.test_key = "11"
+            context_11.request = "11"
 
             # start off a first set of lookups
             res_deferreds = kr.verify_json_objects_for_server(
@@ -173,7 +173,7 @@ class KeyringTestCase(unittest.TestCase):
             self.assertIs(LoggingContext.current_context(), context_11)
 
             context_12 = LoggingContext("12")
-            context_12.test_key = "12"
+            context_12.request = "12"
             with logcontext.PreserveLoggingContext(context_12):
                 # a second request for a server with outstanding requests
                 # should block rather than start a second call
@@ -211,7 +211,7 @@ class KeyringTestCase(unittest.TestCase):
         sentinel_context = LoggingContext.current_context()
 
         with LoggingContext("one") as context_one:
-            context_one.test_key = "one"
+            context_one.request = "one"
 
             defer = kr.verify_json_for_server("server9", {})
             try:
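The switch from the ad-hoc test_key attribute to the declared request field follows from the __slots__ change above: without "__dict__" in __slots__, arbitrary attributes can no longer be attached to a LoggingContext. A standalone illustration of the Python behaviour involved (Slotted is a made-up class):

class Slotted(object):
    __slots__ = ["request"]

s = Slotted()
s.request = "one"     # fine: declared in __slots__
s.test_key = "one"    # AttributeError: 'Slotted' object has no attribute 'test_key'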
@@ -12,12 +12,12 @@ class LoggingContextTestCase(unittest.TestCase):
 
     def _check_test_key(self, value):
         self.assertEquals(
-            LoggingContext.current_context().test_key, value
+            LoggingContext.current_context().request, value
         )
 
     def test_with_context(self):
         with LoggingContext() as context_one:
-            context_one.test_key = "test"
+            context_one.request = "test"
             self._check_test_key("test")
 
     @defer.inlineCallbacks
@@ -25,14 +25,14 @@ class LoggingContextTestCase(unittest.TestCase):
         @defer.inlineCallbacks
         def competing_callback():
             with LoggingContext() as competing_context:
-                competing_context.test_key = "competing"
+                competing_context.request = "competing"
                 yield sleep(0)
                 self._check_test_key("competing")
 
         reactor.callLater(0, competing_callback)
 
         with LoggingContext() as context_one:
-            context_one.test_key = "one"
+            context_one.request = "one"
             yield sleep(0)
             self._check_test_key("one")
 
@@ -43,14 +43,14 @@ class LoggingContextTestCase(unittest.TestCase):
 
         @defer.inlineCallbacks
         def cb():
-            context_one.test_key = "one"
+            context_one.request = "one"
             yield function()
             self._check_test_key("one")
 
             callback_completed[0] = True
 
         with LoggingContext() as context_one:
-            context_one.test_key = "one"
+            context_one.request = "one"
 
             # fire off function, but don't wait on it.
             logcontext.preserve_fn(cb)()
@@ -107,7 +107,7 @@ class LoggingContextTestCase(unittest.TestCase):
         sentinel_context = LoggingContext.current_context()
 
         with LoggingContext() as context_one:
-            context_one.test_key = "one"
+            context_one.request = "one"
 
             d1 = logcontext.make_deferred_yieldable(blocking_function())
             # make sure that the context was reset by make_deferred_yieldable
@@ -124,7 +124,7 @@ class LoggingContextTestCase(unittest.TestCase):
         argument isn't actually a deferred"""
 
         with LoggingContext() as context_one:
-            context_one.test_key = "one"
+            context_one.request = "one"
 
             d1 = logcontext.make_deferred_yieldable("bum")
             self._check_test_key("one")
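Same substitution as in the keyring tests; note that the helper keeps its old name (_check_test_key) even though it now asserts on the request field. The pattern these tests exercise, reduced to a standalone sketch:

def sketch_of_the_pattern():
    # the value set on the context inside the "with" block is what
    # current_context() exposes, and "request" is the marker field used
    with LoggingContext() as context_one:
        context_one.request = "test"
        assert LoggingContext.current_context().request == "test"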