Mirror of https://github.com/element-hq/synapse.git, synced 2024-11-21 09:05:42 +03:00

Run Black. (#5482)

This commit is contained in: parent 7dcf984075, commit 32e7c9e7f2
376 changed files with 9142 additions and 10388 deletions
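Every hunk below is a mechanical reformat: Black reparses each module and reprints it under a fixed style, so behaviour is unchanged even though 376 files are touched. As a minimal illustration of the kind of rewrite this diff shows, here is a sketch using Black's Python API (black.format_str and black.FileMode are the real entry points; the snippet itself is illustrative and not part of the commit):

    import black

    # Pre-Black style, as on the left-hand side of the hunks below.
    src = "data = {\n    'user': user,\n    'password': password,\n    'type': 'm.login.password'\n}\n"

    # Black normalizes quotes and collapses the short literal onto one line:
    # data = {"user": user, "password": password, "type": "m.login.password"}
    print(black.format_str(src, mode=black.FileMode()))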
@@ -5,8 +5,8 @@ steps:
   - command:
       - "python -m pip install tox"
-      - "tox -e pep8"
-    label: "\U0001F9F9 PEP-8"
+      - "tox -e check_codestyle"
+    label: "\U0001F9F9 Check Style"
     plugins:
       - docker#v3.0.1:
           image: "python:3.6"
changelog.d/5482.misc (new file, 1 line)
@@ -0,0 +1 @@
+Synapse's codebase is now formatted by `black`.
@@ -37,9 +37,8 @@ from signedjson.sign import verify_signed_json, SignatureVerifyException

 CONFIG_JSON = "cmdclient_config.json"

-TRUSTED_ID_SERVERS = [
-    'localhost:8001'
-]
+TRUSTED_ID_SERVERS = ["localhost:8001"]


 class SynapseCmd(cmd.Cmd):

@@ -59,7 +58,7 @@ class SynapseCmd(cmd.Cmd):
             "token": token,
             "verbose": "on",
             "complete_usernames": "on",
-            "send_delivery_receipts": "on"
+            "send_delivery_receipts": "on",
         }
         self.path_prefix = "/_matrix/client/api/v1"
         self.event_stream_token = "END"

@@ -120,12 +119,11 @@ class SynapseCmd(cmd.Cmd):
         config_rules = [  # key, valid_values
             ("verbose", ["on", "off"]),
             ("complete_usernames", ["on", "off"]),
-            ("send_delivery_receipts", ["on", "off"])
+            ("send_delivery_receipts", ["on", "off"]),
         ]
         for key, valid_vals in config_rules:
             if key == args["key"] and args["val"] not in valid_vals:
-                print("%s value must be one of %s" % (args["key"],
-                                                      valid_vals))
+                print("%s value must be one of %s" % (args["key"], valid_vals))
                 return

         # toggle the http client verbosity
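A recurring pattern in the hunks above and below is Black's "magic trailing comma": when a collection cannot fit within the line length, Black explodes it one element per line and appends a comma after the last element, so that adding an entry later touches a single line. A small illustrative sketch (assumed example input, not code from the commit):

    import black

    src = (
        "config_rules = [('verbose', ['on', 'off']),"
        " ('complete_usernames', ['on', 'off']),"
        " ('send_delivery_receipts', ['on', 'off'])]\n"
    )

    # Too long for one 88-column line, so Black splits the list one element
    # per line and adds a trailing comma after the last one.
    print(black.format_str(src, mode=black.FileMode()))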
@@ -159,16 +157,13 @@ class SynapseCmd(cmd.Cmd):
         else:
             password = pwd

-        body = {
-            "type": "m.login.password"
-        }
+        body = {"type": "m.login.password"}
         if "userid" in args:
             body["user"] = args["userid"]
         if password:
             body["password"] = password

-        reactor.callFromThread(self._do_register, body,
-                               "noupdate" not in args)
+        reactor.callFromThread(self._do_register, body, "noupdate" not in args)

     @defer.inlineCallbacks
     def _do_register(self, data, update_config):

@@ -179,7 +174,9 @@ class SynapseCmd(cmd.Cmd):

         passwordFlow = None
         for flow in json_res["flows"]:
-            if flow["type"] == "m.login.recaptcha" or ("stages" in flow and "m.login.recaptcha" in flow["stages"]):
+            if flow["type"] == "m.login.recaptcha" or (
+                "stages" in flow and "m.login.recaptcha" in flow["stages"]
+            ):
                 print("Unable to register: Home server requires captcha.")
                 return
             if flow["type"] == "m.login.password" and "stages" not in flow:

@@ -202,9 +199,7 @@ class SynapseCmd(cmd.Cmd):
         """
         try:
             args = self._parse(line, ["user_id"], force_keys=True)
-            can_login = threads.blockingCallFromThread(
-                reactor,
-                self._check_can_login)
+            can_login = threads.blockingCallFromThread(reactor, self._check_can_login)
             if can_login:
                 p = getpass.getpass("Enter your password: ")
                 user = args["user_id"]

@@ -212,20 +207,16 @@ class SynapseCmd(cmd.Cmd):
                 domain = self._domain()
                 if domain:
                     user = "@" + user + ":" + domain

                 reactor.callFromThread(self._do_login, user, p)
-                #print " got %s " % p
+                # print " got %s " % p
         except Exception as e:
             print(e)

     @defer.inlineCallbacks
     def _do_login(self, user, password):
         path = "/login"
-        data = {
-            "user": user,
-            "password": password,
-            "type": "m.login.password"
-        }
+        data = {"user": user, "password": password, "type": "m.login.password"}
         url = self._url() + path
         json_res = yield self.http_client.do_request("POST", url, data=data)
         print(json_res)

@@ -249,12 +240,13 @@ class SynapseCmd(cmd.Cmd):
             print("Failed to find any login flows.")
             defer.returnValue(False)

-        flow = json_res["flows"][0] # assume first is the one we want.
-        if ("type" not in flow or "m.login.password" != flow["type"] or
-                "stages" in flow):
+        flow = json_res["flows"][0]  # assume first is the one we want.
+        if "type" not in flow or "m.login.password" != flow["type"] or "stages" in flow:
             fallback_url = self._url() + "/login/fallback"
-            print ("Unable to login via the command line client. Please visit "
-                "%s to login." % fallback_url)
+            print(
+                "Unable to login via the command line client. Please visit "
+                "%s to login." % fallback_url
+            )
             defer.returnValue(False)
         defer.returnValue(True)

@@ -264,21 +256,33 @@ class SynapseCmd(cmd.Cmd):
        <clientSecret> A string of characters generated when requesting an email that you'll supply in subsequent calls to identify yourself
        <sendAttempt> The number of times the user has requested an email. Leave this the same between requests to retry the request at the transport level. Increment it to request that the email be sent again.
        """
-        args = self._parse(line, ['address', 'clientSecret', 'sendAttempt'])
+        args = self._parse(line, ["address", "clientSecret", "sendAttempt"])

-        postArgs = {'email': args['address'], 'clientSecret': args['clientSecret'], 'sendAttempt': args['sendAttempt']}
+        postArgs = {
+            "email": args["address"],
+            "clientSecret": args["clientSecret"],
+            "sendAttempt": args["sendAttempt"],
+        }

         reactor.callFromThread(self._do_emailrequest, postArgs)

     @defer.inlineCallbacks
     def _do_emailrequest(self, args):
-        url = self._identityServerUrl()+"/_matrix/identity/api/v1/validate/email/requestToken"
+        url = (
+            self._identityServerUrl()
+            + "/_matrix/identity/api/v1/validate/email/requestToken"
+        )

-        json_res = yield self.http_client.do_request("POST", url, data=urllib.urlencode(args), jsonreq=False,
-                                                     headers={'Content-Type': ['application/x-www-form-urlencoded']})
+        json_res = yield self.http_client.do_request(
+            "POST",
+            url,
+            data=urllib.urlencode(args),
+            jsonreq=False,
+            headers={"Content-Type": ["application/x-www-form-urlencoded"]},
+        )
         print(json_res)
-        if 'sid' in json_res:
-            print("Token sent. Your session ID is %s" % (json_res['sid']))
+        if "sid" in json_res:
+            print("Token sent. Your session ID is %s" % (json_res["sid"]))

     def do_emailvalidate(self, line):
         """Validate and associate a third party ID
@@ -286,18 +290,30 @@ class SynapseCmd(cmd.Cmd):
        <token> The token sent to your third party identifier address
        <clientSecret> The same clientSecret you supplied in requestToken
        """
-        args = self._parse(line, ['sid', 'token', 'clientSecret'])
+        args = self._parse(line, ["sid", "token", "clientSecret"])

-        postArgs = { 'sid' : args['sid'], 'token' : args['token'], 'clientSecret': args['clientSecret'] }
+        postArgs = {
+            "sid": args["sid"],
+            "token": args["token"],
+            "clientSecret": args["clientSecret"],
+        }

         reactor.callFromThread(self._do_emailvalidate, postArgs)

     @defer.inlineCallbacks
     def _do_emailvalidate(self, args):
-        url = self._identityServerUrl()+"/_matrix/identity/api/v1/validate/email/submitToken"
+        url = (
+            self._identityServerUrl()
+            + "/_matrix/identity/api/v1/validate/email/submitToken"
+        )

-        json_res = yield self.http_client.do_request("POST", url, data=urllib.urlencode(args), jsonreq=False,
-                                                     headers={'Content-Type': ['application/x-www-form-urlencoded']})
+        json_res = yield self.http_client.do_request(
+            "POST",
+            url,
+            data=urllib.urlencode(args),
+            jsonreq=False,
+            headers={"Content-Type": ["application/x-www-form-urlencoded"]},
+        )
         print(json_res)

     def do_3pidbind(self, line):

@@ -305,19 +321,24 @@ class SynapseCmd(cmd.Cmd):
        <sid> The session ID (sid) given to you in the response to requestToken
        <clientSecret> The same clientSecret you supplied in requestToken
        """
-        args = self._parse(line, ['sid', 'clientSecret'])
+        args = self._parse(line, ["sid", "clientSecret"])

-        postArgs = { 'sid' : args['sid'], 'clientSecret': args['clientSecret'] }
-        postArgs['mxid'] = self.config["user"]
+        postArgs = {"sid": args["sid"], "clientSecret": args["clientSecret"]}
+        postArgs["mxid"] = self.config["user"]

         reactor.callFromThread(self._do_3pidbind, postArgs)

     @defer.inlineCallbacks
     def _do_3pidbind(self, args):
-        url = self._identityServerUrl()+"/_matrix/identity/api/v1/3pid/bind"
+        url = self._identityServerUrl() + "/_matrix/identity/api/v1/3pid/bind"

-        json_res = yield self.http_client.do_request("POST", url, data=urllib.urlencode(args), jsonreq=False,
-                                                     headers={'Content-Type': ['application/x-www-form-urlencoded']})
+        json_res = yield self.http_client.do_request(
+            "POST",
+            url,
+            data=urllib.urlencode(args),
+            jsonreq=False,
+            headers={"Content-Type": ["application/x-www-form-urlencoded"]},
+        )
         print(json_res)

     def do_join(self, line):

@@ -356,9 +377,7 @@ class SynapseCmd(cmd.Cmd):
             if "topic" not in args:
                 print("Must specify a new topic.")
                 return
-            body = {
-                "topic": args["topic"]
-            }
+            body = {"topic": args["topic"]}
             reactor.callFromThread(self._run_and_pprint, "PUT", path, body)
         elif args["action"].lower() == "get":
             reactor.callFromThread(self._run_and_pprint, "GET", path)

@@ -378,45 +397,60 @@ class SynapseCmd(cmd.Cmd):

     @defer.inlineCallbacks
     def _do_invite(self, roomid, userstring):
-        if (not userstring.startswith('@') and
-                self._is_on("complete_usernames")):
-            url = self._identityServerUrl()+"/_matrix/identity/api/v1/lookup"
+        if not userstring.startswith("@") and self._is_on("complete_usernames"):
+            url = self._identityServerUrl() + "/_matrix/identity/api/v1/lookup"

-            json_res = yield self.http_client.do_request("GET", url, qparams={'medium':'email','address':userstring})
+            json_res = yield self.http_client.do_request(
+                "GET", url, qparams={"medium": "email", "address": userstring}
+            )

             mxid = None

-            if 'mxid' in json_res and 'signatures' in json_res:
-                url = self._identityServerUrl()+"/_matrix/identity/api/v1/pubkey/ed25519"
+            if "mxid" in json_res and "signatures" in json_res:
+                url = (
+                    self._identityServerUrl()
+                    + "/_matrix/identity/api/v1/pubkey/ed25519"
+                )

                 pubKey = None
                 pubKeyObj = yield self.http_client.do_request("GET", url)
-                if 'public_key' in pubKeyObj:
-                    pubKey = nacl.signing.VerifyKey(pubKeyObj['public_key'], encoder=nacl.encoding.HexEncoder)
+                if "public_key" in pubKeyObj:
+                    pubKey = nacl.signing.VerifyKey(
+                        pubKeyObj["public_key"], encoder=nacl.encoding.HexEncoder
+                    )
                 else:
                     print("No public key found in pubkey response!")

                 sigValid = False

                 if pubKey:
-                    for signame in json_res['signatures']:
+                    for signame in json_res["signatures"]:
                         if signame not in TRUSTED_ID_SERVERS:
-                            print("Ignoring signature from untrusted server %s" % (signame))
+                            print(
+                                "Ignoring signature from untrusted server %s"
+                                % (signame)
+                            )
                         else:
                             try:
                                 verify_signed_json(json_res, signame, pubKey)
                                 sigValid = True
-                                print("Mapping %s -> %s correctly signed by %s" % (userstring, json_res['mxid'], signame))
+                                print(
+                                    "Mapping %s -> %s correctly signed by %s"
+                                    % (userstring, json_res["mxid"], signame)
+                                )
                                 break
                             except SignatureVerifyException as e:
                                 print("Invalid signature from %s" % (signame))
                                 print(e)

                 if sigValid:
-                    print("Resolved 3pid %s to %s" % (userstring, json_res['mxid']))
-                    mxid = json_res['mxid']
+                    print("Resolved 3pid %s to %s" % (userstring, json_res["mxid"]))
+                    mxid = json_res["mxid"]
                 else:
-                    print("Got association for %s but couldn't verify signature" % (userstring))
+                    print(
+                        "Got association for %s but couldn't verify signature"
+                        % (userstring)
+                    )

         if not mxid:
             mxid = "@" + userstring + ":" + self._domain()
@@ -435,12 +469,11 @@ class SynapseCmd(cmd.Cmd):
         """Sends a message. "send <roomid> <body>" """
         args = self._parse(line, ["roomid", "body"])
         txn_id = "txn%s" % int(time.time())
-        path = "/rooms/%s/send/m.room.message/%s" % (urllib.quote(args["roomid"]),
-                                                     txn_id)
-        body_json = {
-            "msgtype": "m.text",
-            "body": args["body"]
-        }
+        path = "/rooms/%s/send/m.room.message/%s" % (
+            urllib.quote(args["roomid"]),
+            txn_id,
+        )
+        body_json = {"msgtype": "m.text", "body": args["body"]}
         reactor.callFromThread(self._run_and_pprint, "PUT", path, body_json)

     def do_list(self, line):

@@ -472,8 +505,7 @@ class SynapseCmd(cmd.Cmd):
                 print("Bad query param: %s" % key_value)
                 return

-        reactor.callFromThread(self._run_and_pprint, "GET", path,
-                               query_params=qp)
+        reactor.callFromThread(self._run_and_pprint, "GET", path, query_params=qp)

     def do_create(self, line):
         """Creates a room.

@@ -513,8 +545,16 @@ class SynapseCmd(cmd.Cmd):
             return

         args["method"] = args["method"].upper()
-        valid_methods = ["PUT", "GET", "POST", "DELETE",
-                         "XPUT", "XGET", "XPOST", "XDELETE"]
+        valid_methods = [
+            "PUT",
+            "GET",
+            "POST",
+            "DELETE",
+            "XPUT",
+            "XGET",
+            "XPOST",
+            "XDELETE",
+        ]
         if args["method"] not in valid_methods:
             print("Unsupported method: %s" % args["method"])
             return

@@ -541,10 +581,13 @@ class SynapseCmd(cmd.Cmd):
         except:
             pass

-        reactor.callFromThread(self._run_and_pprint, args["method"],
-                               args["path"],
-                               args["data"],
-                               query_params=qp)
+        reactor.callFromThread(
+            self._run_and_pprint,
+            args["method"],
+            args["path"],
+            args["data"],
+            query_params=qp,
+        )

     def do_stream(self, line):
         """Stream data from the server: "stream <longpoll timeout ms>" """

@@ -561,19 +604,22 @@ class SynapseCmd(cmd.Cmd):
     @defer.inlineCallbacks
     def _do_event_stream(self, timeout):
         res = yield self.http_client.get_json(
-            self._url() + "/events",
-            {
-                "access_token": self._tok(),
-                "timeout": str(timeout),
-                "from": self.event_stream_token
-            })
+            self._url() + "/events",
+            {
+                "access_token": self._tok(),
+                "timeout": str(timeout),
+                "from": self.event_stream_token,
+            },
+        )
         print(json.dumps(res, indent=4))

         if "chunk" in res:
             for event in res["chunk"]:
-                if (event["type"] == "m.room.message" and
-                        self._is_on("send_delivery_receipts") and
-                        event["user_id"] != self._usr()):  # not sent by us
+                if (
+                    event["type"] == "m.room.message"
+                    and self._is_on("send_delivery_receipts")
+                    and event["user_id"] != self._usr()
+                ):  # not sent by us
                     self._send_receipt(event, "d")

         # update the position in the stram

@@ -581,18 +627,28 @@ class SynapseCmd(cmd.Cmd):
             self.event_stream_token = res["end"]

     def _send_receipt(self, event, feedback_type):
-        path = ("/rooms/%s/messages/%s/%s/feedback/%s/%s" %
-                (urllib.quote(event["room_id"]), event["user_id"], event["msg_id"],
-                 self._usr(), feedback_type))
+        path = "/rooms/%s/messages/%s/%s/feedback/%s/%s" % (
+            urllib.quote(event["room_id"]),
+            event["user_id"],
+            event["msg_id"],
+            self._usr(),
+            feedback_type,
+        )
         data = {}
-        reactor.callFromThread(self._run_and_pprint, "PUT", path, data=data,
-                               alt_text="Sent receipt for %s" % event["msg_id"])
+        reactor.callFromThread(
+            self._run_and_pprint,
+            "PUT",
+            path,
+            data=data,
+            alt_text="Sent receipt for %s" % event["msg_id"],
+        )

     def _do_membership_change(self, roomid, membership, userid):
-        path = "/rooms/%s/state/m.room.member/%s" % (urllib.quote(roomid), urllib.quote(userid))
-        data = {
-            "membership": membership
-        }
+        path = "/rooms/%s/state/m.room.member/%s" % (
+            urllib.quote(roomid),
+            urllib.quote(userid),
+        )
+        data = {"membership": membership}
         reactor.callFromThread(self._run_and_pprint, "PUT", path, data=data)

     def do_displayname(self, line):

@@ -645,15 +701,20 @@ class SynapseCmd(cmd.Cmd):
         for i, arg in enumerate(line_args):
             for config_key in self.config:
                 if ("$" + config_key) in arg:
-                    arg = arg.replace("$" + config_key,
-                                      self.config[config_key])
+                    arg = arg.replace("$" + config_key, self.config[config_key])
             line_args[i] = arg

         return dict(zip(keys, line_args))

     @defer.inlineCallbacks
-    def _run_and_pprint(self, method, path, data=None,
-                        query_params={"access_token": None}, alt_text=None):
+    def _run_and_pprint(
+        self,
+        method,
+        path,
+        data=None,
+        query_params={"access_token": None},
+        alt_text=None,
+    ):
         """ Runs an HTTP request and pretty prints the output.

         Args:

@@ -666,9 +727,9 @@ class SynapseCmd(cmd.Cmd):
         if "access_token" in query_params:
             query_params["access_token"] = self._tok()

-        json_res = yield self.http_client.do_request(method, url,
-                                                     data=data,
-                                                     qparams=query_params)
+        json_res = yield self.http_client.do_request(
+            method, url, data=data, qparams=query_params
+        )
         if alt_text:
             print(alt_text)
         else:

@@ -676,7 +737,7 @@ class SynapseCmd(cmd.Cmd):


 def save_config(config):
-    with open(CONFIG_JSON, 'w') as out:
+    with open(CONFIG_JSON, "w") as out:
         json.dump(config, out)


@@ -700,7 +761,7 @@ def main(server_url, identity_server_url, username, token, config_path):
     global CONFIG_JSON
     CONFIG_JSON = config_path  # bit cheeky, but just overwrite the global
     try:
-        with open(config_path, 'r') as config:
+        with open(config_path, "r") as config:
             syn_cmd.config = json.load(config)
             try:
                 http_client.verbose = "on" == syn_cmd.config["verbose"]

@@ -717,23 +778,33 @@ def main(server_url, identity_server_url, username, token, config_path):
     reactor.run()


-if __name__ == '__main__':
+if __name__ == "__main__":
     parser = argparse.ArgumentParser("Starts a synapse client.")
     parser.add_argument(
-        "-s", "--server", dest="server", default="http://localhost:8008",
-        help="The URL of the home server to talk to.")
+        "-s",
+        "--server",
+        dest="server",
+        default="http://localhost:8008",
+        help="The URL of the home server to talk to.",
+    )
     parser.add_argument(
-        "-i", "--identity-server", dest="identityserver", default="http://localhost:8090",
-        help="The URL of the identity server to talk to.")
+        "-i",
+        "--identity-server",
+        dest="identityserver",
+        default="http://localhost:8090",
+        help="The URL of the identity server to talk to.",
+    )
     parser.add_argument(
-        "-u", "--username", dest="username",
-        help="Your username on the server.")
-    parser.add_argument(
-        "-t", "--token", dest="token",
-        help="Your access token.")
+        "-u", "--username", dest="username", help="Your username on the server."
+    )
+    parser.add_argument("-t", "--token", dest="token", help="Your access token.")
     parser.add_argument(
-        "-c", "--config", dest="config", default=CONFIG_JSON,
-        help="The location of the config.json file to read from.")
+        "-c",
+        "--config",
+        dest="config",
+        default=CONFIG_JSON,
+        help="The location of the config.json file to read from.",
+    )
     args = parser.parse_args()

     if not args.server:
@@ -73,9 +73,7 @@ class TwistedHttpClient(HttpClient):
     @defer.inlineCallbacks
     def put_json(self, url, data):
         response = yield self._create_put_request(
-            url,
-            data,
-            headers_dict={"Content-Type": ["application/json"]}
+            url, data, headers_dict={"Content-Type": ["application/json"]}
         )
         body = yield readBody(response)
         defer.returnValue((response.code, body))

@@ -95,40 +93,34 @@ class TwistedHttpClient(HttpClient):
         """

         if "Content-Type" not in headers_dict:
-            raise defer.error(
-                RuntimeError("Must include Content-Type header for PUTs"))
+            raise defer.error(RuntimeError("Must include Content-Type header for PUTs"))

         return self._create_request(
-            "PUT",
-            url,
-            producer=_JsonProducer(json_data),
-            headers_dict=headers_dict
+            "PUT", url, producer=_JsonProducer(json_data), headers_dict=headers_dict
         )

     def _create_get_request(self, url, headers_dict={}):
         """ Wrapper of _create_request to issue a GET request
         """
-        return self._create_request(
-            "GET",
-            url,
-            headers_dict=headers_dict
-        )
+        return self._create_request("GET", url, headers_dict=headers_dict)

     @defer.inlineCallbacks
-    def do_request(self, method, url, data=None, qparams=None, jsonreq=True, headers={}):
+    def do_request(
+        self, method, url, data=None, qparams=None, jsonreq=True, headers={}
+    ):
         if qparams:
             url = "%s?%s" % (url, urllib.urlencode(qparams, True))

         if jsonreq:
             prod = _JsonProducer(data)
-            headers['Content-Type'] = ["application/json"];
+            headers["Content-Type"] = ["application/json"]
         else:
             prod = _RawProducer(data)

         if method in ["POST", "PUT"]:
-            response = yield self._create_request(method, url,
-                                                  producer=prod,
-                                                  headers_dict=headers)
+            response = yield self._create_request(
+                method, url, producer=prod, headers_dict=headers
+            )
         else:
             response = yield self._create_request(method, url)

@@ -155,10 +147,7 @@ class TwistedHttpClient(HttpClient):
         while True:
             try:
                 response = yield self.agent.request(
-                    method,
-                    url.encode("UTF8"),
-                    Headers(headers_dict),
-                    producer
+                    method, url.encode("UTF8"), Headers(headers_dict), producer
                 )
                 break
             except Exception as e:

@@ -179,6 +168,7 @@ class TwistedHttpClient(HttpClient):
         reactor.callLater(seconds, d.callback, seconds)
         return d

+
 class _RawProducer(object):
     def __init__(self, data):
         self.data = data

@@ -195,9 +185,11 @@ class _RawProducer(object):
     def stopProducing(self):
         pass

+
 class _JsonProducer(object):
     """ Used by the twisted http client to create the HTTP body from json
     """
+
     def __init__(self, jsn):
         self.data = jsn
         self.body = json.dumps(jsn).encode("utf8")
@@ -19,13 +19,13 @@ from curses.ascii import isprint
 from twisted.internet import reactor


-class CursesStdIO():
+class CursesStdIO:
     def __init__(self, stdscr, callback=None):
         self.statusText = "Synapse test app -"
-        self.searchText = ''
+        self.searchText = ""
         self.stdscr = stdscr

-        self.logLine = ''
+        self.logLine = ""

         self.callback = callback

@@ -71,8 +71,7 @@ class CursesStdIO():
         i = 0
         index = len(self.lines) - 1
         while i < (self.rows - 3) and index >= 0:
-            self.stdscr.addstr(self.rows - 3 - i, 0, self.lines[index],
-                               curses.A_NORMAL)
+            self.stdscr.addstr(self.rows - 3 - i, 0, self.lines[index], curses.A_NORMAL)
             i = i + 1
             index = index - 1

@@ -85,15 +84,13 @@ class CursesStdIO():
             raise RuntimeError("TextTooLongError")

         self.stdscr.addstr(
-            self.rows - 2, 0,
-            text + ' ' * (self.cols - len(text)),
-            curses.A_STANDOUT)
+            self.rows - 2, 0, text + " " * (self.cols - len(text)), curses.A_STANDOUT
+        )

     def printLogLine(self, text):
         self.stdscr.addstr(
-            0, 0,
-            text + ' ' * (self.cols - len(text)),
-            curses.A_STANDOUT)
+            0, 0, text + " " * (self.cols - len(text)), curses.A_STANDOUT
+        )

     def doRead(self):
         """ Input is ready! """

@@ -105,7 +102,7 @@ class CursesStdIO():

         elif c == curses.KEY_ENTER or c == 10:
             text = self.searchText
-            self.searchText = ''
+            self.searchText = ""

             self.print_line(">> %s" % text)

@@ -122,11 +119,13 @@ class CursesStdIO():
                 return
             self.searchText = self.searchText + chr(c)

-        self.stdscr.addstr(self.rows - 1, 0,
-                           self.searchText + (' ' * (
-                               self.cols - len(self.searchText) - 2)))
+        self.stdscr.addstr(
+            self.rows - 1,
+            0,
+            self.searchText + (" " * (self.cols - len(self.searchText) - 2)),
+        )

-        self.paintStatus(self.statusText + ' %d' % len(self.searchText))
+        self.paintStatus(self.statusText + " %d" % len(self.searchText))
         self.stdscr.move(self.rows - 1, len(self.searchText))
         self.stdscr.refresh()

@@ -143,7 +142,6 @@ class CursesStdIO():


 class Callback(object):
-
     def __init__(self, stdio):
         self.stdio = stdio

@@ -152,7 +150,7 @@ class Callback(object):


 def main(stdscr):
-    screen = CursesStdIO(stdscr) # create Screen object
+    screen = CursesStdIO(stdscr)  # create Screen object

     callback = Callback(screen)

@@ -164,5 +162,5 @@ def main(stdscr):
     screen.close()


-if __name__ == '__main__':
+if __name__ == "__main__":
     curses.wrapper(main)
@@ -28,9 +28,7 @@ Currently assumes the local address is localhost:<port>
 """


-from synapse.federation import (
-    ReplicationHandler
-)
+from synapse.federation import ReplicationHandler

 from synapse.federation.units import Pdu

@@ -38,7 +36,7 @@ from synapse.util import origin_from_ucid

 from synapse.app.homeserver import SynapseHomeServer

-#from synapse.util.logutils import log_function
+# from synapse.util.logutils import log_function

 from twisted.internet import reactor, defer
 from twisted.python import log

@@ -83,7 +81,7 @@ class InputOutput(object):
             room_name, = m.groups()
             self.print_line("%s joining %s" % (self.user, room_name))
             self.server.join_room(room_name, self.user, self.user)
-            #self.print_line("OK.")
+            # self.print_line("OK.")
             return

         m = re.match("^invite (\S+) (\S+)$", line)

@@ -92,7 +90,7 @@ class InputOutput(object):
             room_name, invitee = m.groups()
             self.print_line("%s invited to %s" % (invitee, room_name))
             self.server.invite_to_room(room_name, self.user, invitee)
-            #self.print_line("OK.")
+            # self.print_line("OK.")
             return

         m = re.match("^send (\S+) (.*)$", line)

@@ -101,7 +99,7 @@ class InputOutput(object):
             room_name, body = m.groups()
             self.print_line("%s send to %s" % (self.user, room_name))
             self.server.send_message(room_name, self.user, body)
-            #self.print_line("OK.")
+            # self.print_line("OK.")
             return

         m = re.match("^backfill (\S+)$", line)

@@ -125,7 +123,6 @@ class InputOutput(object):


 class IOLoggerHandler(logging.Handler):
-
     def __init__(self, io):
         logging.Handler.__init__(self)
         self.io = io

@@ -142,6 +139,7 @@ class Room(object):
     """ Used to store (in memory) the current membership state of a room, and
     which home servers we should send PDUs associated with the room to.
     """
+
     def __init__(self, room_name):
         self.room_name = room_name
         self.invited = set()

@@ -175,6 +173,7 @@ class HomeServer(ReplicationHandler):
     """ A very basic home server implentation that allows people to join a
     room and then invite other people.
     """
+
     def __init__(self, server_name, replication_layer, output):
         self.server_name = server_name
         self.replication_layer = replication_layer

@@ -197,26 +196,27 @@ class HomeServer(ReplicationHandler):
         elif pdu.content["membership"] == "invite":
             self._on_invite(pdu.origin, pdu.context, pdu.state_key)
         else:
-            self.output.print_line("#%s (unrec) %s = %s" %
-                (pdu.context, pdu.pdu_type, json.dumps(pdu.content))
+            self.output.print_line(
+                "#%s (unrec) %s = %s"
+                % (pdu.context, pdu.pdu_type, json.dumps(pdu.content))
             )

-    #def on_state_change(self, pdu):
-        ##self.output.print_line("#%s (state) %s *** %s" %
-                ##(pdu.context, pdu.state_key, pdu.pdu_type)
-        ##)
+    # def on_state_change(self, pdu):
+    ##self.output.print_line("#%s (state) %s *** %s" %
+    ##(pdu.context, pdu.state_key, pdu.pdu_type)
+    ##)

-        #if "joinee" in pdu.content:
-            #self._on_join(pdu.context, pdu.content["joinee"])
-        #elif "invitee" in pdu.content:
-            #self._on_invite(pdu.origin, pdu.context, pdu.content["invitee"])
+    # if "joinee" in pdu.content:
+    # self._on_join(pdu.context, pdu.content["joinee"])
+    # elif "invitee" in pdu.content:
+    # self._on_invite(pdu.origin, pdu.context, pdu.content["invitee"])

     def _on_message(self, pdu):
         """ We received a message
         """
-        self.output.print_line("#%s %s %s" %
-            (pdu.context, pdu.content["sender"], pdu.content["body"])
-        )
+        self.output.print_line(
+            "#%s %s %s" % (pdu.context, pdu.content["sender"], pdu.content["body"])
+        )

     def _on_join(self, context, joinee):
         """ Someone has joined a room, either a remote user or a local user

@@ -224,9 +224,7 @@ class HomeServer(ReplicationHandler):
         room = self._get_or_create_room(context)
         room.add_participant(joinee)

-        self.output.print_line("#%s %s %s" %
-            (context, joinee, "*** JOINED")
-        )
+        self.output.print_line("#%s %s %s" % (context, joinee, "*** JOINED"))

     def _on_invite(self, origin, context, invitee):
         """ Someone has been invited

@@ -234,9 +232,7 @@ class HomeServer(ReplicationHandler):
         room = self._get_or_create_room(context)
         room.add_invited(invitee)

-        self.output.print_line("#%s %s %s" %
-            (context, invitee, "*** INVITED")
-        )
+        self.output.print_line("#%s %s %s" % (context, invitee, "*** INVITED"))

         if not room.have_got_metadata and origin is not self.server_name:
             logger.debug("Get room state")

@@ -272,14 +268,14 @@ class HomeServer(ReplicationHandler):

         try:
             pdu = Pdu.create_new(
-                    context=room_name,
-                    pdu_type="sy.room.member",
-                    is_state=True,
-                    state_key=joinee,
-                    content={"membership": "join"},
-                    origin=self.server_name,
-                    destinations=destinations,
-                )
+                context=room_name,
+                pdu_type="sy.room.member",
+                is_state=True,
+                state_key=joinee,
+                content={"membership": "join"},
+                origin=self.server_name,
+                destinations=destinations,
+            )
             yield self.replication_layer.send_pdu(pdu)
         except Exception as e:
             logger.exception(e)

@@ -318,21 +314,21 @@ class HomeServer(ReplicationHandler):
         return self.replication_layer.backfill(dest, room_name, limit)

     def _get_room_remote_servers(self, room_name):
-        return [i for i in self.joined_rooms.setdefault(room_name,).servers]
+        return [i for i in self.joined_rooms.setdefault(room_name).servers]

     def _get_or_create_room(self, room_name):
         return self.joined_rooms.setdefault(room_name, Room(room_name))

     def get_servers_for_context(self, context):
         return defer.succeed(
-                self.joined_rooms.setdefault(context, Room(context)).servers
-            )
+            self.joined_rooms.setdefault(context, Room(context)).servers
+        )


 def main(stdscr):
     parser = argparse.ArgumentParser()
-    parser.add_argument('user', type=str)
-    parser.add_argument('-v', '--verbose', action='count')
+    parser.add_argument("user", type=str)
+    parser.add_argument("-v", "--verbose", action="count")
     args = parser.parse_args()

     user = args.user

@@ -342,8 +338,9 @@ def main(stdscr):

     root_logger = logging.getLogger()

-    formatter = logging.Formatter('%(asctime)s - %(name)s - %(lineno)d - '
-                                  '%(levelname)s - %(message)s')
+    formatter = logging.Formatter(
+        "%(asctime)s - %(name)s - %(lineno)d - " "%(levelname)s - %(message)s"
+    )
     if not os.path.exists("logs"):
         os.makedirs("logs")
     fh = logging.FileHandler("logs/%s" % user)
@@ -1,4 +1,5 @@
 from __future__ import print_function
+
 # Copyright 2014-2016 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");

@@ -58,9 +59,9 @@ def make_graph(pdus, room, filename_prefix):
         name = make_name(pdu.get("pdu_id"), pdu.get("origin"))
         pdu_map[name] = pdu

-        t = datetime.datetime.fromtimestamp(
-            float(pdu["ts"]) / 1000
-        ).strftime('%Y-%m-%d %H:%M:%S,%f')
+        t = datetime.datetime.fromtimestamp(float(pdu["ts"]) / 1000).strftime(
+            "%Y-%m-%d %H:%M:%S,%f"
+        )

         label = (
             "<"

@@ -80,11 +81,7 @@ def make_graph(pdus, room, filename_prefix):
             "depth": pdu.get("depth"),
         }

-        node = pydot.Node(
-            name=name,
-            label=label,
-            color=color_map[pdu.get("origin")]
-        )
+        node = pydot.Node(name=name, label=label, color=color_map[pdu.get("origin")])
         node_map[name] = node
         graph.add_node(node)

@@ -108,14 +105,13 @@ def make_graph(pdus, room, filename_prefix):

         if prev_state_name in node_map:
             state_edge = pydot.Edge(
-                node_map[start_name], node_map[prev_state_name],
-                style='dotted'
+                node_map[start_name], node_map[prev_state_name], style="dotted"
             )
             graph.add_edge(state_edge)

-    graph.write('%s.dot' % filename_prefix, format='raw', prog='dot')
-    # graph.write_png("%s.png" % filename_prefix, prog='dot')
-    graph.write_svg("%s.svg" % filename_prefix, prog='dot')
+    graph.write("%s.dot" % filename_prefix, format="raw", prog="dot")
+    # graph.write_png("%s.png" % filename_prefix, prog='dot')
+    graph.write_svg("%s.svg" % filename_prefix, prog="dot")


 def get_pdus(host, room):

@@ -131,15 +127,14 @@ def get_pdus(host, room):
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
         description="Generate a PDU graph for a given room by talking "
-                    "to the given homeserver to get the list of PDUs. \n"
-                    "Requires pydot."
+        "to the given homeserver to get the list of PDUs. \n"
+        "Requires pydot."
     )
     parser.add_argument(
-        "-p", "--prefix", dest="prefix",
-        help="String to prefix output files with"
+        "-p", "--prefix", dest="prefix", help="String to prefix output files with"
     )
-    parser.add_argument('host')
-    parser.add_argument('room')
+    parser.add_argument("host")
+    parser.add_argument("room")

     args = parser.parse_args()
@@ -36,10 +36,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
     args = [room_id]

     if limit:
-        sql += (
-            " ORDER BY topological_ordering DESC, stream_ordering DESC "
-            "LIMIT ?"
-        )
+        sql += " ORDER BY topological_ordering DESC, stream_ordering DESC " "LIMIT ?"

         args.append(limit)

@@ -56,9 +53,8 @@ def make_graph(db_name, room_id, file_prefix, limit):

     for event in events:
         c = conn.execute(
-            "SELECT state_group FROM event_to_state_groups "
-            "WHERE event_id = ?",
-            (event.event_id,)
+            "SELECT state_group FROM event_to_state_groups " "WHERE event_id = ?",
+            (event.event_id,),
         )

         res = c.fetchone()

@@ -69,7 +65,7 @@ def make_graph(db_name, room_id, file_prefix, limit):

         t = datetime.datetime.fromtimestamp(
             float(event.origin_server_ts) / 1000
-        ).strftime('%Y-%m-%d %H:%M:%S,%f')
+        ).strftime("%Y-%m-%d %H:%M:%S,%f")

         content = json.dumps(unfreeze(event.get_dict()["content"]))

@@ -93,10 +89,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
             "state_group": state_group,
         }

-        node = pydot.Node(
-            name=event.event_id,
-            label=label,
-        )
+        node = pydot.Node(name=event.event_id, label=label)

         node_map[event.event_id] = node
         graph.add_node(node)

@@ -106,10 +99,7 @@ def make_graph(db_name, room_id, file_prefix, limit):
             try:
                 end_node = node_map[prev_id]
             except:
-                end_node = pydot.Node(
-                    name=prev_id,
-                    label="<<b>%s</b>>" % (prev_id,),
-                )
+                end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))

                 node_map[prev_id] = end_node
                 graph.add_node(end_node)

@@ -121,36 +111,33 @@ def make_graph(db_name, room_id, file_prefix, limit):
         if len(event_ids) <= 1:
             continue

-        cluster = pydot.Cluster(
-            str(group),
-            label="<State Group: %s>" % (str(group),)
-        )
+        cluster = pydot.Cluster(str(group), label="<State Group: %s>" % (str(group),))

         for event_id in event_ids:
             cluster.add_node(node_map[event_id])

         graph.add_subgraph(cluster)

-    graph.write('%s.dot' % file_prefix, format='raw', prog='dot')
-    graph.write_svg("%s.svg" % file_prefix, prog='dot')
+    graph.write("%s.dot" % file_prefix, format="raw", prog="dot")
+    graph.write_svg("%s.svg" % file_prefix, prog="dot")


 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
         description="Generate a PDU graph for a given room by talking "
-                    "to the given homeserver to get the list of PDUs. \n"
-                    "Requires pydot."
+        "to the given homeserver to get the list of PDUs. \n"
+        "Requires pydot."
     )
     parser.add_argument(
-        "-p", "--prefix", dest="prefix",
+        "-p",
+        "--prefix",
+        dest="prefix",
         help="String to prefix output files with",
-        default="graph_output"
+        default="graph_output",
     )
-    parser.add_argument(
-        "-l", "--limit",
-        help="Only retrieve the last N events.",
-    )
-    parser.add_argument('db')
-    parser.add_argument('room')
+    parser.add_argument("-l", "--limit", help="Only retrieve the last N events.")
+    parser.add_argument("db")
+    parser.add_argument("room")

     args = parser.parse_args()
@@ -1,4 +1,5 @@
 from __future__ import print_function
+
 # Copyright 2016 OpenMarket Ltd
 #
 # Licensed under the Apache License, Version 2.0 (the "License");

@@ -42,7 +43,7 @@ def make_graph(file_name, room_id, file_prefix, limit):
     print("Sorted events")

     if limit:
-        events = events[-int(limit):]
+        events = events[-int(limit) :]

     node_map = {}
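The space inserted before the slice colon above is deliberate: PEP 8 treats the colon of a slice with complex bounds like a binary operator, and Black applies that rule mechanically, while simple bounds keep the compact form. A tiny illustrative sketch (assumed inputs, not code from the commit):

    import black

    # A unary-minus-plus-call bound counts as complex, so it gets spaces...
    print(black.format_str("events[-int(limit):]\n", mode=black.FileMode()))
    # events[-int(limit) :]

    # ...while a simple bound stays compact.
    print(black.format_str("events[1:]\n", mode=black.FileMode()))
    # events[1:]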
@@ -51,7 +52,7 @@ def make_graph(file_name, room_id, file_prefix, limit):
     for event in events:
         t = datetime.datetime.fromtimestamp(
             float(event.origin_server_ts) / 1000
-        ).strftime('%Y-%m-%d %H:%M:%S,%f')
+        ).strftime("%Y-%m-%d %H:%M:%S,%f")

         content = json.dumps(unfreeze(event.get_dict()["content"]), indent=4)
         content = content.replace("\n", "<br/>\n")

@@ -67,9 +68,10 @@ def make_graph(file_name, room_id, file_prefix, limit):
             value = json.dumps(value)

             content.append(
-                "<b>%s</b>: %s," % (
-                    cgi.escape(key, quote=True).encode("ascii", 'xmlcharrefreplace'),
-                    cgi.escape(value, quote=True).encode("ascii", 'xmlcharrefreplace'),
+                "<b>%s</b>: %s,"
+                % (
+                    cgi.escape(key, quote=True).encode("ascii", "xmlcharrefreplace"),
+                    cgi.escape(value, quote=True).encode("ascii", "xmlcharrefreplace"),
                 )
             )

@@ -95,10 +97,7 @@ def make_graph(file_name, room_id, file_prefix, limit):
             "depth": event.depth,
         }

-        node = pydot.Node(
-            name=event.event_id,
-            label=label,
-        )
+        node = pydot.Node(name=event.event_id, label=label)

         node_map[event.event_id] = node
         graph.add_node(node)

@@ -110,10 +109,7 @@ def make_graph(file_name, room_id, file_prefix, limit):
             try:
                 end_node = node_map[prev_id]
             except:
-                end_node = pydot.Node(
-                    name=prev_id,
-                    label="<<b>%s</b>>" % (prev_id,),
-                )
+                end_node = pydot.Node(name=prev_id, label="<<b>%s</b>>" % (prev_id,))

                 node_map[prev_id] = end_node
                 graph.add_node(end_node)

@@ -123,31 +119,31 @@ def make_graph(file_name, room_id, file_prefix, limit):

     print("Created edges")

-    graph.write('%s.dot' % file_prefix, format='raw', prog='dot')
+    graph.write("%s.dot" % file_prefix, format="raw", prog="dot")

     print("Created Dot")

-    graph.write_svg("%s.svg" % file_prefix, prog='dot')
+    graph.write_svg("%s.svg" % file_prefix, prog="dot")

     print("Created svg")


 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
         description="Generate a PDU graph for a given room by reading "
-                    "from a file with line deliminated events. \n"
-                    "Requires pydot."
+        "from a file with line deliminated events. \n"
+        "Requires pydot."
     )
     parser.add_argument(
-        "-p", "--prefix", dest="prefix",
+        "-p",
+        "--prefix",
+        dest="prefix",
         help="String to prefix output files with",
-        default="graph_output"
+        default="graph_output",
     )
-    parser.add_argument(
-        "-l", "--limit",
-        help="Only retrieve the last N events.",
-    )
-    parser.add_argument('event_file')
-    parser.add_argument('room')
+    parser.add_argument("-l", "--limit", help="Only retrieve the last N events.")
+    parser.add_argument("event_file")
+    parser.add_argument("room")

     args = parser.parse_args()
@@ -20,24 +20,25 @@ import urllib
 import subprocess
 import time

-#ACCESS_TOKEN="" #
+# ACCESS_TOKEN="" #

-MATRIXBASE = 'https://matrix.org/_matrix/client/api/v1/'
-MYUSERNAME = '@davetest:matrix.org'
+MATRIXBASE = "https://matrix.org/_matrix/client/api/v1/"
+MYUSERNAME = "@davetest:matrix.org"

-HTTPBIND = 'https://meet.jit.si/http-bind'
-#HTTPBIND = 'https://jitsi.vuc.me/http-bind'
-#ROOMNAME = "matrix"
+HTTPBIND = "https://meet.jit.si/http-bind"
+# HTTPBIND = 'https://jitsi.vuc.me/http-bind'
+# ROOMNAME = "matrix"
 ROOMNAME = "pibble"

-HOST="guest.jit.si"
-#HOST="jitsi.vuc.me"
+HOST = "guest.jit.si"
+# HOST="jitsi.vuc.me"

-TURNSERVER="turn.guest.jit.si"
-#TURNSERVER="turn.jitsi.vuc.me"
+TURNSERVER = "turn.guest.jit.si"
+# TURNSERVER="turn.jitsi.vuc.me"

-ROOMDOMAIN="meet.jit.si"
-#ROOMDOMAIN="conference.jitsi.vuc.me"
+ROOMDOMAIN = "meet.jit.si"
+# ROOMDOMAIN="conference.jitsi.vuc.me"

+
 class TrivialMatrixClient:
     def __init__(self, access_token):

@@ -46,38 +47,50 @@ class TrivialMatrixClient:

     def getEvent(self):
         while True:
-            url = MATRIXBASE+'events?access_token='+self.access_token+"&timeout=60000"
+            url = (
+                MATRIXBASE
+                + "events?access_token="
+                + self.access_token
+                + "&timeout=60000"
+            )
             if self.token:
-                url += "&from="+self.token
+                url += "&from=" + self.token
             req = grequests.get(url)
             resps = grequests.map([req])
             obj = json.loads(resps[0].content)
-            print("incoming from matrix",obj)
-            if 'end' not in obj:
+            print("incoming from matrix", obj)
+            if "end" not in obj:
                 continue
-            self.token = obj['end']
-            if len(obj['chunk']):
-                return obj['chunk'][0]
+            self.token = obj["end"]
+            if len(obj["chunk"]):
+                return obj["chunk"][0]

     def joinRoom(self, roomId):
-        url = MATRIXBASE+'rooms/'+roomId+'/join?access_token='+self.access_token
+        url = MATRIXBASE + "rooms/" + roomId + "/join?access_token=" + self.access_token
         print(url)
-        headers={ 'Content-Type': 'application/json' }
-        req = grequests.post(url, headers=headers, data='{}')
+        headers = {"Content-Type": "application/json"}
+        req = grequests.post(url, headers=headers, data="{}")
         resps = grequests.map([req])
         obj = json.loads(resps[0].content)
-        print("response: ",obj)
+        print("response: ", obj)

     def sendEvent(self, roomId, evType, event):
-        url = MATRIXBASE+'rooms/'+roomId+'/send/'+evType+'?access_token='+self.access_token
+        url = (
+            MATRIXBASE
+            + "rooms/"
+            + roomId
+            + "/send/"
+            + evType
+            + "?access_token="
+            + self.access_token
+        )
         print(url)
         print(json.dumps(event))
-        headers={ 'Content-Type': 'application/json' }
+        headers = {"Content-Type": "application/json"}
         req = grequests.post(url, headers=headers, data=json.dumps(event))
         resps = grequests.map([req])
         obj = json.loads(resps[0].content)
-        print("response: ",obj)
+        print("response: ", obj)


 xmppClients = {}

@@ -87,38 +100,39 @@ def matrixLoop():
     while True:
         ev = matrixCli.getEvent()
         print(ev)
-        if ev['type'] == 'm.room.member':
-            print('membership event')
-            if ev['membership'] == 'invite' and ev['state_key'] == MYUSERNAME:
-                roomId = ev['room_id']
+        if ev["type"] == "m.room.member":
+            print("membership event")
+            if ev["membership"] == "invite" and ev["state_key"] == MYUSERNAME:
+                roomId = ev["room_id"]
                 print("joining room %s" % (roomId))
                 matrixCli.joinRoom(roomId)
-        elif ev['type'] == 'm.room.message':
-            if ev['room_id'] in xmppClients:
+        elif ev["type"] == "m.room.message":
+            if ev["room_id"] in xmppClients:
                 print("already have a bridge for that user, ignoring")
                 continue
             print("got message, connecting")
-            xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
-            gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
-        elif ev['type'] == 'm.call.invite':
+            xmppClients[ev["room_id"]] = TrivialXmppClient(ev["room_id"], ev["user_id"])
+            gevent.spawn(xmppClients[ev["room_id"]].xmppLoop)
+        elif ev["type"] == "m.call.invite":
             print("Incoming call")
-            #sdp = ev['content']['offer']['sdp']
-            #print "sdp: %s" % (sdp)
-            #xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
-            #gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
-        elif ev['type'] == 'm.call.answer':
+            # sdp = ev['content']['offer']['sdp']
+            # print "sdp: %s" % (sdp)
+            # xmppClients[ev['room_id']] = TrivialXmppClient(ev['room_id'], ev['user_id'])
+            # gevent.spawn(xmppClients[ev['room_id']].xmppLoop)
+        elif ev["type"] == "m.call.answer":
             print("Call answered")
-            sdp = ev['content']['answer']['sdp']
-            if ev['room_id'] not in xmppClients:
+            sdp = ev["content"]["answer"]["sdp"]
+            if ev["room_id"] not in xmppClients:
                 print("We didn't have a call for that room")
                 continue
             # should probably check call ID too
-            xmppCli = xmppClients[ev['room_id']]
+            xmppCli = xmppClients[ev["room_id"]]
             xmppCli.sendAnswer(sdp)
-        elif ev['type'] == 'm.call.hangup':
-            if ev['room_id'] in xmppClients:
-                xmppClients[ev['room_id']].stop()
-                del xmppClients[ev['room_id']]
+        elif ev["type"] == "m.call.hangup":
+            if ev["room_id"] in xmppClients:
+                xmppClients[ev["room_id"]].stop()
+                del xmppClients[ev["room_id"]]


 class TrivialXmppClient:
     def __init__(self, matrixRoom, userId):

@@ -132,130 +146,155 @@ class TrivialXmppClient:

     def nextRid(self):
         self.rid += 1
-        return '%d' % (self.rid)
+        return "%d" % (self.rid)

     def sendIq(self, xml):
-        fullXml = "<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s'>%s</body>" % (self.nextRid(), self.sid, xml)
-        #print "\t>>>%s" % (fullXml)
+        fullXml = (
+            "<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s'>%s</body>"
+            % (self.nextRid(), self.sid, xml)
+        )
+        # print "\t>>>%s" % (fullXml)
         return self.xmppPoke(fullXml)

     def xmppPoke(self, xml):
-        headers = {'Content-Type': 'application/xml'}
+        headers = {"Content-Type": "application/xml"}
        req = grequests.post(HTTPBIND, verify=False, headers=headers, data=xml)
         resps = grequests.map([req])
         obj = BeautifulSoup(resps[0].content)
         return obj

     def sendAnswer(self, answer):
-        print("sdp from matrix client",answer)
-        p = subprocess.Popen(['node', 'unjingle/unjingle.js', '--sdp'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+        print("sdp from matrix client", answer)
+        p = subprocess.Popen(
+            ["node", "unjingle/unjingle.js", "--sdp"],
+            stdin=subprocess.PIPE,
+            stdout=subprocess.PIPE,
+        )
         jingle, out_err = p.communicate(answer)
         jingle = jingle % {
-            'tojid': self.callfrom,
-            'action': 'session-accept',
-            'initiator': self.callfrom,
-            'responder': self.jid,
-            'sid': self.callsid
+            "tojid": self.callfrom,
+            "action": "session-accept",
+            "initiator": self.callfrom,
+            "responder": self.jid,
+            "sid": self.callsid,
         }
-        print("answer jingle from sdp",jingle)
+        print("answer jingle from sdp", jingle)
         res = self.sendIq(jingle)
-        print("reply from answer: ",res)
+        print("reply from answer: ", res)

         self.ssrcs = {}
         jingleSoup = BeautifulSoup(jingle)
-        for cont in jingleSoup.iq.jingle.findAll('content'):
+        for cont in jingleSoup.iq.jingle.findAll("content"):
             if cont.description:
-                self.ssrcs[cont['name']] = cont.description['ssrc']
-        print("my ssrcs:",self.ssrcs)
+                self.ssrcs[cont["name"]] = cont.description["ssrc"]
+        print("my ssrcs:", self.ssrcs)

-        gevent.joinall([
-            gevent.spawn(self.advertiseSsrcs)
-        ])
+        gevent.joinall([gevent.spawn(self.advertiseSsrcs)])

     def advertiseSsrcs(self):
         time.sleep(7)
         print("SSRC spammer started")
         while self.running:
-            ssrcMsg = "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>" % { 'tojid': "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid), 'nick': self.userId, 'assrc': self.ssrcs['audio'], 'vssrc': self.ssrcs['video'] }
+            ssrcMsg = (
+                "<presence to='%(tojid)s' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%(nick)s</nick><stats xmlns='http://jitsi.org/jitmeet/stats'><stat name='bitrate_download' value='175'/><stat name='bitrate_upload' value='176'/><stat name='packetLoss_total' value='0'/><stat name='packetLoss_download' value='0'/><stat name='packetLoss_upload' value='0'/></stats><media xmlns='http://estos.de/ns/mjs'><source type='audio' ssrc='%(assrc)s' direction='sendre'/><source type='video' ssrc='%(vssrc)s' direction='sendre'/></media></presence>"
+                % {
+                    "tojid": "%s@%s/%s" % (ROOMNAME, ROOMDOMAIN, self.shortJid),
+                    "nick": self.userId,
+                    "assrc": self.ssrcs["audio"],
+                    "vssrc": self.ssrcs["video"],
+                }
+            )
             res = self.sendIq(ssrcMsg)
-            print("reply from ssrc announce: ",res)
+            print("reply from ssrc announce: ", res)
             time.sleep(10)

     def xmppLoop(self):
         self.matrixCallId = time.time()
-        res = self.xmppPoke("<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' to='%s' xml:lang='en' wait='60' hold='1' content='text/xml; charset=utf-8' ver='1.6' xmpp:version='1.0' xmlns:xmpp='urn:xmpp:xbosh'/>" % (self.nextRid(), HOST))
+        res = self.xmppPoke(
+            "<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' to='%s' xml:lang='en' wait='60' hold='1' content='text/xml; charset=utf-8' ver='1.6' xmpp:version='1.0' xmlns:xmpp='urn:xmpp:xbosh'/>"
+            % (self.nextRid(), HOST)
+        )

         print(res)
-        self.sid = res.body['sid']
+        self.sid = res.body["sid"]
         print("sid %s" % (self.sid))

-        res = self.sendIq("<auth xmlns='urn:ietf:params:xml:ns:xmpp-sasl' mechanism='ANONYMOUS'/>")
+        res = self.sendIq(
+            "<auth xmlns='urn:ietf:params:xml:ns:xmpp-sasl' mechanism='ANONYMOUS'/>"
+        )

-        res = self.xmppPoke("<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s' to='%s' xml:lang='en' xmpp:restart='true' xmlns:xmpp='urn:xmpp:xbosh'/>" % (self.nextRid(), self.sid, HOST))
+        res = self.xmppPoke(
+            "<body rid='%s' xmlns='http://jabber.org/protocol/httpbind' sid='%s' to='%s' xml:lang='en' xmpp:restart='true' xmlns:xmpp='urn:xmpp:xbosh'/>"
+            % (self.nextRid(), self.sid, HOST)
+        )

-        res = self.sendIq("<iq type='set' id='_bind_auth_2' xmlns='jabber:client'><bind xmlns='urn:ietf:params:xml:ns:xmpp-bind'/></iq>")
+        res = self.sendIq(
+            "<iq type='set' id='_bind_auth_2' xmlns='jabber:client'><bind xmlns='urn:ietf:params:xml:ns:xmpp-bind'/></iq>"
+        )
         print(res)

         self.jid = res.body.iq.bind.jid.string
         print("jid: %s" % (self.jid))
-        self.shortJid = self.jid.split('-')[0]
+        self.shortJid = self.jid.split("-")[0]

-        res = self.sendIq("<iq type='set' id='_session_auth_2' xmlns='jabber:client'><session xmlns='urn:ietf:params:xml:ns:xmpp-session'/></iq>")
+        res = self.sendIq(
+            "<iq type='set' id='_session_auth_2' xmlns='jabber:client'><session xmlns='urn:ietf:params:xml:ns:xmpp-session'/></iq>"
+        )

-        #randomthing = res.body.iq['to']
-        #whatsitpart = randomthing.split('-')[0]
+        # randomthing = res.body.iq['to']
+        # whatsitpart = randomthing.split('-')[0]

-        #print "other random bind thing: %s" % (randomthing)
+        # print "other random bind thing: %s" % (randomthing)

         # advertise preence to the jitsi room, with our nick
-        res = self.sendIq("<iq type='get' to='%s' xmlns='jabber:client' id='1:sendIQ'><services xmlns='urn:xmpp:extdisco:1'><service host='%s'/></services></iq><presence to='%s@%s/d98f6c40' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%s</nick></presence>" % (HOST, TURNSERVER, ROOMNAME, ROOMDOMAIN, self.userId))
-        self.muc = {'users': []}
-        for p in res.body.findAll('presence'):
+        res = self.sendIq(
+            "<iq type='get' to='%s' xmlns='jabber:client' id='1:sendIQ'><services xmlns='urn:xmpp:extdisco:1'><service host='%s'/></services></iq><presence to='%s@%s/d98f6c40' xmlns='jabber:client'><x xmlns='http://jabber.org/protocol/muc'/><c xmlns='http://jabber.org/protocol/caps' hash='sha-1' node='http://jitsi.org/jitsimeet' ver='0WkSdhFnAUxrz4ImQQLdB80GFlE='/><nick xmlns='http://jabber.org/protocol/nick'>%s</nick></presence>"
+            % (HOST, TURNSERVER, ROOMNAME, ROOMDOMAIN, self.userId)
+        )
+        self.muc = {"users": []}
+        for p in res.body.findAll("presence"):
             u = {}
-            u['shortJid'] = p['from'].split('/')[1]
+            u["shortJid"] = p["from"].split("/")[1]
             if p.c and p.c.nick:
-                u['nick'] = p.c.nick.string
-            self.muc['users'].append(u)
-        print("muc: ",self.muc)
+                u["nick"] = p.c.nick.string
+            self.muc["users"].append(u)
+        print("muc: ", self.muc)

         # wait for stuff
         while True:
             print("waiting...")
             res = self.sendIq("")
-            print("got from stream: ",res)
+            print("got from stream: ", res)
             if res.body.iq:
-                jingles = res.body.iq.findAll('jingle')
+                jingles = res.body.iq.findAll("jingle")
                 if len(jingles):
-                    self.callfrom = res.body.iq['from']
+                    self.callfrom = res.body.iq["from"]
                     self.handleInvite(jingles[0])
-            elif 'type' in res.body and res.body['type'] == 'terminate':
+            elif "type" in res.body and res.body["type"] == "terminate":
                 self.running = False
                 del xmppClients[self.matrixRoom]
                 return

     def handleInvite(self, jingle):
-        self.initiator = jingle['initiator']
-        self.callsid = jingle['sid']
-        p = subprocess.Popen(['node', 'unjingle/unjingle.js', '--jingle'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
-        print("raw jingle invite",str(jingle))
+        self.initiator = jingle["initiator"]
+        self.callsid = jingle["sid"]
+        p = subprocess.Popen(
+            ["node", "unjingle/unjingle.js", "--jingle"],
+            stdin=subprocess.PIPE,
+            stdout=subprocess.PIPE,
+        )
+        print("raw jingle invite", str(jingle))
         sdp, out_err = p.communicate(str(jingle))
-        print("transformed remote offer sdp",sdp)
+        print("transformed remote offer sdp", sdp)
         inviteEvent = {
-            'offer': {
-                'type': 'offer',
-                'sdp': sdp
-            },
-            'call_id': self.matrixCallId,
-            'version': 0,
-            'lifetime': 30000
+            "offer": {"type": "offer", "sdp": sdp},
+            "call_id": self.matrixCallId,
+            "version": 0,
+            "lifetime": 30000,
         }
-        matrixCli.sendEvent(self.matrixRoom, 'm.call.invite', inviteEvent)
+        matrixCli.sendEvent(self.matrixRoom, "m.call.invite", inviteEvent)
|
||||
|
||||
|
||||
matrixCli = TrivialMatrixClient(ACCESS_TOKEN) # Undefined name
|
||||
|
||||
gevent.joinall([
|
||||
gevent.spawn(matrixLoop)
|
||||
])
|
||||
|
||||
gevent.joinall([gevent.spawn(matrixLoop)])
|
||||
|
|
|
@@ -11,22 +11,22 @@ try:
except NameError: # Python 3
raw_input = input


def _mkurl(template, kws):
for key in kws:
template = template.replace(key, kws[key])
return template

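For reference, _mkurl does blind string substitution over the template; a minimal sketch of its use (all values hypothetical):

url = _mkurl(
    "$HS/_matrix/client/api/v1/rooms/$ROOM/state?access_token=$TOKEN",
    {"$HS": "https://matrix.org", "$ROOM": "!abc:matrix.org", "$TOKEN": "secret"},
)
# url == "https://matrix.org/_matrix/client/api/v1/rooms/!abc:matrix.org/state?access_token=secret"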
def main(hs, room_id, access_token, user_id_prefix, why):
if not why:
why = "Automated kick."
print("Kicking members on %s in room %s matching %s" % (hs, room_id, user_id_prefix))
print(
"Kicking members on %s in room %s matching %s" % (hs, room_id, user_id_prefix)
)
room_state_url = _mkurl(
"$HS/_matrix/client/api/v1/rooms/$ROOM/state?access_token=$TOKEN",
{
"$HS": hs,
"$ROOM": room_id,
"$TOKEN": access_token
}
{"$HS": hs, "$ROOM": room_id, "$TOKEN": access_token},
)
print("Getting room state => %s" % room_state_url)
res = requests.get(room_state_url)
@@ -57,24 +57,16 @@ def main(hs, room_id, access_token, user_id_prefix, why):
for uid in kick_list:
print(uid)
doit = raw_input("Continue? [Y]es\n")
if len(doit) > 0 and doit.lower() == 'y':
if len(doit) > 0 and doit.lower() == "y":
print("Kicking members...")
# encode them all
kick_list = [urllib.quote(uid) for uid in kick_list]
for uid in kick_list:
kick_url = _mkurl(
"$HS/_matrix/client/api/v1/rooms/$ROOM/state/m.room.member/$UID?access_token=$TOKEN",
{
"$HS": hs,
"$UID": uid,
"$ROOM": room_id,
"$TOKEN": access_token
}
{"$HS": hs, "$UID": uid, "$ROOM": room_id, "$TOKEN": access_token},
)
kick_body = {
"membership": "leave",
"reason": why
}
kick_body = {"membership": "leave", "reason": why}
print("Kicking %s" % uid)
res = requests.put(kick_url, data=json.dumps(kick_body))
if res.status_code != 200:
@@ -83,14 +75,15 @@ def main(hs, room_id, access_token, user_id_prefix, why):
print("ERROR: JSON %s" % res.json())


if __name__ == "__main__":
parser = ArgumentParser("Kick members in a room matching a certain user ID prefix.")
parser.add_argument("-u","--user-id",help="The user ID prefix e.g. '@irc_'")
parser.add_argument("-t","--token",help="Your access_token")
parser.add_argument("-r","--room",help="The room ID to kick members in")
parser.add_argument("-s","--homeserver",help="The base HS url e.g. http://matrix.org")
parser.add_argument("-w","--why",help="Reason for the kick. Optional.")
parser.add_argument("-u", "--user-id", help="The user ID prefix e.g. '@irc_'")
parser.add_argument("-t", "--token", help="Your access_token")
parser.add_argument("-r", "--room", help="The room ID to kick members in")
parser.add_argument(
"-s", "--homeserver", help="The base HS url e.g. http://matrix.org"
)
parser.add_argument("-w", "--why", help="Reason for the kick. Optional.")
args = parser.parse_args()
if not args.room or not args.token or not args.user_id or not args.homeserver:
parser.print_help()
@@ -6,23 +6,25 @@ import cgi, logging
from daemonize import Daemonize


class SimpleHTTPRequestHandlerWithPOST(SimpleHTTPServer.SimpleHTTPRequestHandler):
UPLOAD_PATH = "upload"

"""
Accept all post request as file upload
"""

def do_POST(self):

path = os.path.join(self.UPLOAD_PATH, os.path.basename(self.path))
length = self.headers['content-length']
length = self.headers["content-length"]
data = self.rfile.read(int(length))

with open(path, 'wb') as fh:
with open(path, "wb") as fh:
fh.write(data)

self.send_response(200)
self.send_header('Content-Type', 'application/json')
self.send_header("Content-Type", "application/json")
self.end_headers()

# Return the absolute path of the uploaded file
@@ -33,30 +35,25 @@ def setup():
parser = argparse.ArgumentParser()
parser.add_argument("directory")
parser.add_argument("-p", "--port", dest="port", type=int, default=8080)
parser.add_argument('-P', "--pid-file", dest="pid", default="web.pid")
parser.add_argument("-P", "--pid-file", dest="pid", default="web.pid")
args = parser.parse_args()

# Get absolute path to directory to serve, as daemonize changes to '/'
os.chdir(args.directory)
dr = os.getcwd()

httpd = BaseHTTPServer.HTTPServer(
('', args.port),
SimpleHTTPRequestHandlerWithPOST
)
httpd = BaseHTTPServer.HTTPServer(("", args.port), SimpleHTTPRequestHandlerWithPOST)

def run():
os.chdir(dr)
httpd.serve_forever()

daemon = Daemonize(
app="synapse-webclient",
pid=args.pid,
action=run,
auto_close_fds=False,
)
app="synapse-webclient", pid=args.pid, action=run, auto_close_fds=False
)

daemon.start()

if __name__ == '__main__':
if __name__ == "__main__":
setup()
@@ -8,7 +8,10 @@ import glob
import codecs

# Utility functions
convert = lambda src, dst, environ: open(dst, "w").write(jinja2.Template(open(src).read()).render(**environ))
convert = lambda src, dst, environ: open(dst, "w").write(
jinja2.Template(open(src).read()).render(**environ)
)

def check_arguments(environ, args):
for argument in args:

@@ -16,18 +19,22 @@ def check_arguments(environ, args):
print("Environment variable %s is mandatory, exiting." % argument)
sys.exit(2)


def generate_secrets(environ, secrets):
for name, secret in secrets.items():
if secret not in environ:
filename = "/data/%s.%s.key" % (environ["SYNAPSE_SERVER_NAME"], name)
if os.path.exists(filename):
with open(filename) as handle: value = handle.read()
with open(filename) as handle:
value = handle.read()
else:
print("Generating a random secret for {}".format(name))
value = codecs.encode(os.urandom(32), "hex").decode()
with open(filename, "w") as handle: handle.write(value)
with open(filename, "w") as handle:
handle.write(value)
environ[secret] = value

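A minimal sketch of how generate_secrets is driven (server name hypothetical): a missing secret is minted once, written under /data, and reused on later runs.

environ = {"SYNAPSE_SERVER_NAME": "example.com"}
generate_secrets(environ, {"registration": "SYNAPSE_REGISTRATION_SHARED_SECRET"})
# environ["SYNAPSE_REGISTRATION_SHARED_SECRET"] is now a 64-char hex string,
# persisted to /data/example.com.registration.key.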
# Prepare the configuration
mode = sys.argv[1] if len(sys.argv) > 1 else None
environ = os.environ.copy()

@@ -36,12 +43,17 @@ args = ["python", "-m", "synapse.app.homeserver"]

# In generate mode, generate a configuration, missing keys, then exit
if mode == "generate":
check_arguments(environ, ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS", "SYNAPSE_CONFIG_PATH"))
check_arguments(
environ, ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS", "SYNAPSE_CONFIG_PATH")
)
args += [
"--server-name", environ["SYNAPSE_SERVER_NAME"],
"--report-stats", environ["SYNAPSE_REPORT_STATS"],
"--config-path", environ["SYNAPSE_CONFIG_PATH"],
"--generate-config"
"--server-name",
environ["SYNAPSE_SERVER_NAME"],
"--report-stats",
environ["SYNAPSE_REPORT_STATS"],
"--config-path",
environ["SYNAPSE_CONFIG_PATH"],
"--generate-config",
]
os.execv("/usr/local/bin/python", args)

@@ -51,15 +63,19 @@ else:
config_path = environ["SYNAPSE_CONFIG_PATH"]
else:
check_arguments(environ, ("SYNAPSE_SERVER_NAME", "SYNAPSE_REPORT_STATS"))
generate_secrets(environ, {
"registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
"macaroon": "SYNAPSE_MACAROON_SECRET_KEY"
})
generate_secrets(
environ,
{
"registration": "SYNAPSE_REGISTRATION_SHARED_SECRET",
"macaroon": "SYNAPSE_MACAROON_SECRET_KEY",
},
)
environ["SYNAPSE_APPSERVICES"] = glob.glob("/data/appservices/*.yaml")
if not os.path.exists("/compiled"): os.mkdir("/compiled")
if not os.path.exists("/compiled"):
os.mkdir("/compiled")

config_path = "/compiled/homeserver.yaml"

# Convert SYNAPSE_NO_TLS to boolean if exists
if "SYNAPSE_NO_TLS" in environ:
tlsanswerstring = str.lower(environ["SYNAPSE_NO_TLS"])
@@ -69,19 +85,23 @@ else:
if tlsanswerstring in ("false", "off", "0", "no"):
environ["SYNAPSE_NO_TLS"] = False
else:
print("Environment variable \"SYNAPSE_NO_TLS\" found but value \"" + tlsanswerstring + "\" unrecognized; exiting.")
print(
'Environment variable "SYNAPSE_NO_TLS" found but value "'
+ tlsanswerstring
+ '" unrecognized; exiting.'
)
sys.exit(2)

convert("/conf/homeserver.yaml", config_path, environ)
convert("/conf/log.config", "/compiled/log.config", environ)
subprocess.check_output(["chown", "-R", ownership, "/data"])

args += [
"--config-path", config_path,

"--config-path",
config_path,
# tell synapse to put any generated keys in /data rather than /compiled
"--keys-directory", "/data",
"--keys-directory",
"/data",
]

# Generate missing keys and start synapse
@@ -18,226 +18,220 @@ import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
sys.path.insert(0, os.path.abspath(".."))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.coverage',
'sphinx.ext.ifconfig',
'sphinxcontrib.napoleon',
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
"sphinx.ext.coverage",
"sphinx.ext.ifconfig",
"sphinxcontrib.napoleon",
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
templates_path = ["_templates"]

# The suffix of source filenames.
source_suffix = '.rst'
source_suffix = ".rst"

# The encoding of source files.
#source_encoding = 'utf-8-sig'
# source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'
master_doc = "index"

# General information about the project.
project = u'Synapse'
copyright = u'Copyright 2014-2017 OpenMarket Ltd, 2017 Vector Creations Ltd, 2017 New Vector Ltd'
project = "Synapse"
copyright = (
"Copyright 2014-2017 OpenMarket Ltd, 2017 Vector Creations Ltd, 2017 New Vector Ltd"
)

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
version = "1.0"
# The full version, including alpha/beta/rc tags.
release = '1.0'
release = "1.0"

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
exclude_patterns = ["_build"]

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
pygments_style = "sphinx"

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# keep_warnings = False


# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
html_theme = "default"

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_static_path = ["_static"]

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# html_extra_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True
# html_domain_indices = True

# If false, no index is generated.
#html_use_index = True
# html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False
# html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'Synapsedoc'
htmlhelp_basename = "Synapsedoc"


# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',

# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',

# Additional stuff for the LaTeX preamble.
#'preamble': '',
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'Synapse.tex', u'Synapse Documentation',
u'TNG', 'manual'),
]
latex_documents = [("index", "Synapse.tex", "Synapse Documentation", "TNG", "manual")]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False
# latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []
# latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True
# latex_domain_indices = True


# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'synapse', u'Synapse Documentation',
[u'TNG'], 1)
]
man_pages = [("index", "synapse", "Synapse Documentation", ["TNG"], 1)]

# If true, show URL addresses after external links.
#man_show_urls = False
# man_show_urls = False


# -- Options for Texinfo output -------------------------------------------
@@ -246,26 +240,32 @@ man_pages = [
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Synapse', u'Synapse Documentation',
u'TNG', 'Synapse', 'One line description of project.',
'Miscellaneous'),
(
"index",
"Synapse",
"Synapse Documentation",
"TNG",
"Synapse",
"One line description of project.",
"Miscellaneous",
)
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True
# texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# texinfo_no_detailmenu = False


# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
intersphinx_mapping = {"http://docs.python.org/": None}

napoleon_include_special_with_doc = True
napoleon_use_ivar = True
@@ -28,3 +28,22 @@
directory = "misc"
name = "Internal Changes"
showcontent = true

[tool.black]
target-version = ['py34']
exclude = '''

(
/(
\.eggs  # exclude a few common directories in the
| \.git  # root of the project
| \.tox
| \.venv
| _build
| _trial_temp.*
| build
| dist
| debian
)/
)
'''
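The exclude value above is a single verbose regular expression; black matches it against each candidate file path and skips anything that hits. A minimal sketch of the same check, assuming standard re semantics:

import re

exclude = re.compile(
    r"/(\.eggs|\.git|\.tox|\.venv|_build|_trial_temp.*|build|dist|debian)/"
)
assert exclude.search("/.tox/py36/lib/site.py")       # skipped by black
assert not exclude.search("/synapse/api/auth.py")     # formatted by black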
@@ -39,11 +39,11 @@ def check_auth(auth, auth_chain, events):
print("Success:", e.event_id, e.type, e.state_key)


if __name__ == '__main__':
if __name__ == "__main__":
parser = argparse.ArgumentParser()

parser.add_argument(
'json', nargs='?', type=argparse.FileType('r'), default=sys.stdin
"json", nargs="?", type=argparse.FileType("r"), default=sys.stdin
)

args = parser.parse_args()
@@ -30,7 +30,7 @@ class dictobj(dict):
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"input_json", nargs="?", type=argparse.FileType('r'), default=sys.stdin
"input_json", nargs="?", type=argparse.FileType("r"), default=sys.stdin
)
args = parser.parse_args()
logging.basicConfig()
@@ -1,4 +1,3 @@

import argparse
import json
import logging

@@ -40,7 +39,7 @@ def main():
parser = argparse.ArgumentParser()
parser.add_argument("signature_name")
parser.add_argument(
"input_json", nargs="?", type=argparse.FileType('r'), default=sys.stdin
"input_json", nargs="?", type=argparse.FileType("r"), default=sys.stdin
)

args = parser.parse_args()

@@ -69,5 +68,5 @@ def main():
print("FAIL %s" % (key_id,))


if __name__ == '__main__':
if __name__ == "__main__":
main()

@@ -116,5 +116,5 @@ def main():
connection.commit()


if __name__ == '__main__':
if __name__ == "__main__":
main()
@@ -19,10 +19,10 @@ class DefinitionVisitor(ast.NodeVisitor):
self.names = {}
self.attrs = set()
self.definitions = {
'def': self.functions,
'class': self.classes,
'names': self.names,
'attrs': self.attrs,
"def": self.functions,
"class": self.classes,
"names": self.names,
"attrs": self.attrs,
}

def visit_Name(self, node):

@@ -47,23 +47,23 @@ class DefinitionVisitor(ast.NodeVisitor):


def non_empty(defs):
functions = {name: non_empty(f) for name, f in defs['def'].items()}
classes = {name: non_empty(f) for name, f in defs['class'].items()}
functions = {name: non_empty(f) for name, f in defs["def"].items()}
classes = {name: non_empty(f) for name, f in defs["class"].items()}
result = {}
if functions:
result['def'] = functions
result["def"] = functions
if classes:
result['class'] = classes
names = defs['names']
result["class"] = classes
names = defs["names"]
uses = []
for name in names.get('Load', ()):
if name not in names.get('Param', ()) and name not in names.get('Store', ()):
for name in names.get("Load", ()):
if name not in names.get("Param", ()) and name not in names.get("Store", ()):
uses.append(name)
uses.extend(defs['attrs'])
uses.extend(defs["attrs"])
if uses:
result['uses'] = uses
result['names'] = names
result['attrs'] = defs['attrs']
result["uses"] = uses
result["names"] = names
result["attrs"] = defs["attrs"]
return result

@@ -81,33 +81,33 @@ def definitions_in_file(filepath):


def defined_names(prefix, defs, names):
for name, funcs in defs.get('def', {}).items():
names.setdefault(name, {'defined': []})['defined'].append(prefix + name)
for name, funcs in defs.get("def", {}).items():
names.setdefault(name, {"defined": []})["defined"].append(prefix + name)
defined_names(prefix + name + ".", funcs, names)

for name, funcs in defs.get('class', {}).items():
names.setdefault(name, {'defined': []})['defined'].append(prefix + name)
for name, funcs in defs.get("class", {}).items():
names.setdefault(name, {"defined": []})["defined"].append(prefix + name)
defined_names(prefix + name + ".", funcs, names)


def used_names(prefix, item, defs, names):
for name, funcs in defs.get('def', {}).items():
for name, funcs in defs.get("def", {}).items():
used_names(prefix + name + ".", name, funcs, names)

for name, funcs in defs.get('class', {}).items():
for name, funcs in defs.get("class", {}).items():
used_names(prefix + name + ".", name, funcs, names)

path = prefix.rstrip('.')
for used in defs.get('uses', ()):
path = prefix.rstrip(".")
for used in defs.get("uses", ()):
if used in names:
if item:
names[item].setdefault('uses', []).append(used)
names[used].setdefault('used', {}).setdefault(item, []).append(path)
names[item].setdefault("uses", []).append(used)
names[used].setdefault("used", {}).setdefault(item, []).append(path)


if __name__ == '__main__':
if __name__ == "__main__":

parser = argparse.ArgumentParser(description='Find definitions.')
parser = argparse.ArgumentParser(description="Find definitions.")
parser.add_argument(
"--unused", action="store_true", help="Only list unused definitions"
)

@@ -119,7 +119,7 @@ if __name__ == '__main__':
)
parser.add_argument(
"directories",
nargs='+',
nargs="+",
metavar="DIR",
help="Directories to search for definitions",
)
@@ -164,7 +164,7 @@ if __name__ == '__main__':
continue
if ignore and any(pattern.match(name) for pattern in ignore):
continue
if args.unused and definition.get('used'):
if args.unused and definition.get("used"):
continue
result[name] = definition

@@ -196,9 +196,9 @@ if __name__ == '__main__':
continue
result[name] = definition

if args.format == 'yaml':
if args.format == "yaml":
yaml.dump(result, sys.stdout, default_flow_style=False)
elif args.format == 'dot':
elif args.format == "dot":
print("digraph {")
for name, entry in result.items():
print(name)
@@ -63,7 +63,7 @@ def encode_canonical_json(value):
# Encode code-points outside of ASCII as UTF-8 rather than \u escapes
ensure_ascii=False,
# Remove unnecessary white space.
separators=(',', ':'),
separators=(",", ":"),
# Sort the keys of dictionaries.
sort_keys=True,
# Encode the resulting unicode as UTF-8 bytes.
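The three settings above are what make the output canonical; a minimal illustration:

import json

doc = {"b": 1, "a": "café"}
print(json.dumps(doc, ensure_ascii=False, separators=(",", ":"), sort_keys=True))
# {"a":"café","b":1}  (keys sorted, no whitespace, non-ASCII left as UTF-8)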
@@ -145,7 +145,7 @@ def request_json(method, origin_name, origin_key, destination, path, content):
authorization_headers = []

for key, sig in signed_json["signatures"][origin_name].items():
header = "X-Matrix origin=%s,key=\"%s\",sig=\"%s\"" % (origin_name, key, sig)
header = 'X-Matrix origin=%s,key="%s",sig="%s"' % (origin_name, key, sig)
authorization_headers.append(header.encode("ascii"))
print("Authorization: %s" % header, file=sys.stderr)

@@ -161,11 +161,7 @@ def request_json(method, origin_name, origin_key, destination, path, content):
headers["Content-Type"] = "application/json"

result = s.request(
method=method,
url=dest,
headers=headers,
verify=False,
data=content,
method=method, url=dest, headers=headers, verify=False, data=content
)
sys.stderr.write("Status Code: %d\n" % (result.status_code,))
return result.json()
@@ -241,18 +237,18 @@ def main():


def read_args_from_config(args):
with open(args.config, 'r') as fh:
with open(args.config, "r") as fh:
config = yaml.safe_load(fh)
if not args.server_name:
args.server_name = config['server_name']
args.server_name = config["server_name"]
if not args.signing_key_path:
args.signing_key_path = config['signing_key_path']
args.signing_key_path = config["signing_key_path"]


class MatrixConnectionAdapter(HTTPAdapter):
@staticmethod
def lookup(s, skip_well_known=False):
if s[-1] == ']':
if s[-1] == "]":
# ipv6 literal (with no port)
return s, 8448

@@ -268,9 +264,7 @@ class MatrixConnectionAdapter(HTTPAdapter):
if not skip_well_known:
well_known = MatrixConnectionAdapter.get_well_known(s)
if well_known:
return MatrixConnectionAdapter.lookup(
well_known, skip_well_known=True
)
return MatrixConnectionAdapter.lookup(well_known, skip_well_known=True)

try:
srv = srvlookup.lookup("matrix", "tcp", s)[0]

@@ -280,8 +274,8 @@ class MatrixConnectionAdapter(HTTPAdapter):

@staticmethod
def get_well_known(server_name):
uri = "https://%s/.well-known/matrix/server" % (server_name, )
print("fetching %s" % (uri, ), file=sys.stderr)
uri = "https://%s/.well-known/matrix/server" % (server_name,)
print("fetching %s" % (uri,), file=sys.stderr)

try:
resp = requests.get(uri)

@@ -294,12 +288,12 @@ class MatrixConnectionAdapter(HTTPAdapter):
raise Exception("not a dict")
if "m.server" not in parsed_well_known:
raise Exception("Missing key 'm.server'")
new_name = parsed_well_known['m.server']
print("well-known lookup gave %s" % (new_name, ), file=sys.stderr)
new_name = parsed_well_known["m.server"]
print("well-known lookup gave %s" % (new_name,), file=sys.stderr)
return new_name

except Exception as e:
print("Invalid response from %s: %s" % (uri, e, ), file=sys.stderr)
print("Invalid response from %s: %s" % (uri, e), file=sys.stderr)
return None

def get_connection(self, url, proxies=None):
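The well-known handling above reduces to: fetch https://<server>/.well-known/matrix/server, require a JSON object with an "m.server" key, and re-run the lookup on that value. A condensed sketch (server name hypothetical):

import requests

resp = requests.get("https://example.com/.well-known/matrix/server")
parsed = resp.json()
if isinstance(parsed, dict) and "m.server" in parsed:
    new_name = parsed["m.server"]  # e.g. "matrix.example.com:443"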
@@ -79,5 +79,5 @@ def main():
conn.commit()


if __name__ == '__main__':
if __name__ == "__main__":
main()

@@ -35,11 +35,11 @@ def find_patterns_in_file(filepath):
find_patterns_in_code(f.read())


parser = argparse.ArgumentParser(description='Find url patterns.')
parser = argparse.ArgumentParser(description="Find url patterns.")

parser.add_argument(
"directories",
nargs='+',
nargs="+",
metavar="DIR",
help="Directories to search for definitions",
)

@@ -63,5 +63,5 @@ def main():
streams[update.name] = update.position


if __name__ == '__main__':
if __name__ == "__main__":
main()
@@ -24,14 +24,14 @@ if __name__ == "__main__":
parser = argparse.ArgumentParser()

parser.add_argument(
"-o", "--output_file",

type=argparse.FileType('w'),
"-o",
"--output_file",
type=argparse.FileType("w"),
default=sys.stdout,
help="Where to write the output to",
)
args = parser.parse_args()

key_id = "a_" + random_string(4)
key = generate_signing_key(key_id),
key = (generate_signing_key(key_id),)
write_signing_keys(args.output_file, key)
@@ -50,7 +50,7 @@ def main(src_repo, dest_repo):
dest_paths = MediaFilePaths(dest_repo)
for line in sys.stdin:
line = line.strip()
parts = line.split('|')
parts = line.split("|")
if len(parts) != 2:
print("Unable to parse input line %s" % line, file=sys.stderr)
exit(1)

@@ -107,7 +107,7 @@ if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
)
parser.add_argument("-v", action='store_true', help='enable debug logging')
parser.add_argument("-v", action="store_true", help="enable debug logging")
parser.add_argument("src_repo", help="Path to source content repo")
parser.add_argument("dest_repo", help="Path to source content repo")
args = parser.parse_args()
@@ -15,18 +15,17 @@ ignore =
tox.ini

[flake8]
max-line-length = 90

# see https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes
# for error codes. The ones we ignore are:
# W503: line break before binary operator
# W504: line break after binary operator
# E203: whitespace before ':' (which is contrary to pep8?)
# E731: do not assign a lambda expression, use a def
ignore=W503,W504,E203,E731
# E501: Line too long (black enforces this for us)
ignore=W503,W504,E203,E731,E501

[isort]
line_length = 89
line_length = 88
not_skip = __init__.py
sections=FUTURE,STDLIB,COMPAT,THIRDPARTY,TWISTED,FIRSTPARTY,TESTS,LOCALFOLDER
default_section=THIRDPARTY
setup.py

@@ -60,9 +60,12 @@ class TestCommand(Command):
pass

def run(self):
print ("""Synapse's tests cannot be run via setup.py. To run them, try:
print(
"""Synapse's tests cannot be run via setup.py. To run them, try:
PYTHONPATH="." trial tests
""")
"""
)


def read_file(path_segments):
"""Read a file from the package. Takes a list of strings to join to

@@ -84,9 +87,9 @@ version = exec_file(("synapse", "__init__.py"))["__version__"]
dependencies = exec_file(("synapse", "python_dependencies.py"))
long_description = read_file(("README.rst",))

REQUIREMENTS = dependencies['REQUIREMENTS']
CONDITIONAL_REQUIREMENTS = dependencies['CONDITIONAL_REQUIREMENTS']
ALL_OPTIONAL_REQUIREMENTS = dependencies['ALL_OPTIONAL_REQUIREMENTS']
REQUIREMENTS = dependencies["REQUIREMENTS"]
CONDITIONAL_REQUIREMENTS = dependencies["CONDITIONAL_REQUIREMENTS"]
ALL_OPTIONAL_REQUIREMENTS = dependencies["ALL_OPTIONAL_REQUIREMENTS"]

# Make `pip install matrix-synapse[all]` install all the optional dependencies.
CONDITIONAL_REQUIREMENTS["all"] = list(ALL_OPTIONAL_REQUIREMENTS)

@@ -102,16 +105,16 @@ setup(
include_package_data=True,
zip_safe=False,
long_description=long_description,
python_requires='~=3.5',
python_requires="~=3.5",
classifiers=[
'Development Status :: 5 - Production/Stable',
'Topic :: Communications :: Chat',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
"Development Status :: 5 - Production/Stable",
"Topic :: Communications :: Chat",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
scripts=["synctl"] + glob.glob("scripts/*"),
cmdclass={'test': TestCommand},
cmdclass={"test": TestCommand},
)
@@ -28,6 +28,7 @@ try:
from twisted.internet import protocol
from twisted.internet.protocol import Factory
from twisted.names.dns import DNSDatagramProtocol

protocol.Factory.noisy = False
Factory.noisy = False
DNSDatagramProtocol.noisy = False
@@ -57,18 +57,18 @@ def request_registration(

nonce = r.json()["nonce"]

mac = hmac.new(key=shared_secret.encode('utf8'), digestmod=hashlib.sha1)
mac = hmac.new(key=shared_secret.encode("utf8"), digestmod=hashlib.sha1)

mac.update(nonce.encode('utf8'))
mac.update(nonce.encode("utf8"))
mac.update(b"\x00")
mac.update(user.encode('utf8'))
mac.update(user.encode("utf8"))
mac.update(b"\x00")
mac.update(password.encode('utf8'))
mac.update(password.encode("utf8"))
mac.update(b"\x00")
mac.update(b"admin" if admin else b"notadmin")
if user_type:
mac.update(b"\x00")
mac.update(user_type.encode('utf8'))
mac.update(user_type.encode("utf8"))

mac = mac.hexdigest()
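The MAC built above is HMAC-SHA1 over nonce, user, password and the admin flag joined by NUL bytes; an equivalent standalone sketch when no user_type is given (all values hypothetical):

import hashlib
import hmac

mac = hmac.new(key=b"shared_secret", digestmod=hashlib.sha1)
mac.update(b"\x00".join([b"a_nonce", b"alice", b"hunter2", b"notadmin"]))
print(mac.hexdigest())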
@@ -134,8 +134,9 @@ def register_new_user(user, password, server_location, shared_secret, admin, use
else:
admin = False

request_registration(user, password, server_location, shared_secret,
bool(admin), user_type)
request_registration(
user, password, server_location, shared_secret, bool(admin), user_type
)


def main():

@@ -189,7 +190,7 @@ def main():
group.add_argument(
"-c",
"--config",
type=argparse.FileType('r'),
type=argparse.FileType("r"),
help="Path to server config file. Used to read in shared secret.",
)

@@ -200,7 +201,7 @@ def main():
parser.add_argument(
"server_url",
default="https://localhost:8448",
nargs='?',
nargs="?",
help="URL to use to talk to the home server. Defaults to "
" 'https://localhost:8448'.",
)

@@ -220,8 +221,9 @@ def main():
if args.admin or args.no_admin:
admin = args.admin

register_new_user(args.user, args.password, args.server_url, secret,
admin, args.user_type)
register_new_user(
args.user, args.password, args.server_url, secret, admin, args.user_type
)


if __name__ == "__main__":
@@ -36,8 +36,11 @@ logger = logging.getLogger(__name__)


AuthEventTypes = (
EventTypes.Create, EventTypes.Member, EventTypes.PowerLevels,
EventTypes.JoinRules, EventTypes.RoomHistoryVisibility,
EventTypes.Create,
EventTypes.Member,
EventTypes.PowerLevels,
EventTypes.JoinRules,
EventTypes.RoomHistoryVisibility,
EventTypes.ThirdPartyInvite,
)

@@ -54,6 +57,7 @@ class Auth(object):
FIXME: This class contains a mix of functions for authenticating users
of our client-server API and authenticating events added to room graphs.
"""

def __init__(self, hs):
self.hs = hs
self.clock = hs.get_clock()

@@ -70,15 +74,12 @@ class Auth(object):
def check_from_context(self, room_version, event, context, do_sig_check=True):
prev_state_ids = yield context.get_prev_state_ids(self.store)
auth_events_ids = yield self.compute_auth_events(
event, prev_state_ids, for_verification=True,
event, prev_state_ids, for_verification=True
)
auth_events = yield self.store.get_events(auth_events_ids)
auth_events = {
(e.type, e.state_key): e for e in itervalues(auth_events)
}
auth_events = {(e.type, e.state_key): e for e in itervalues(auth_events)}
self.check(
room_version, event,
auth_events=auth_events, do_sig_check=do_sig_check,
room_version, event, auth_events=auth_events, do_sig_check=do_sig_check
)

def check(self, room_version, event, auth_events, do_sig_check=True):

@@ -115,15 +116,10 @@ class Auth(object):
the room.
"""
if current_state:
member = current_state.get(
(EventTypes.Member, user_id),
None
)
member = current_state.get((EventTypes.Member, user_id), None)
else:
member = yield self.state.get_current_state(
room_id=room_id,
event_type=EventTypes.Member,
state_key=user_id
room_id=room_id, event_type=EventTypes.Member, state_key=user_id
)

self._check_joined_room(member, user_id, room_id)

@@ -143,23 +139,17 @@ class Auth(object):
the room. This will be the leave event if they have left the room.
"""
member = yield self.state.get_current_state(
room_id=room_id,
event_type=EventTypes.Member,
state_key=user_id
room_id=room_id, event_type=EventTypes.Member, state_key=user_id
)
membership = member.membership if member else None

if membership not in (Membership.JOIN, Membership.LEAVE):
raise AuthError(403, "User %s not in room %s" % (
user_id, room_id
))
raise AuthError(403, "User %s not in room %s" % (user_id, room_id))

if membership == Membership.LEAVE:
forgot = yield self.store.did_forget(user_id, room_id)
if forgot:
raise AuthError(403, "User %s not in room %s" % (
user_id, room_id
))
raise AuthError(403, "User %s not in room %s" % (user_id, room_id))

defer.returnValue(member)

@@ -171,9 +161,9 @@ class Auth(object):

def _check_joined_room(self, member, user_id, room_id):
if not member or member.membership != Membership.JOIN:
raise AuthError(403, "User %s not in room %s (%s)" % (
user_id, room_id, repr(member)
))
raise AuthError(
403, "User %s not in room %s (%s)" % (user_id, room_id, repr(member))
)

def can_federate(self, event, auth_events):
creation_event = auth_events.get((EventTypes.Create, ""))

@@ -185,11 +175,7 @@ class Auth(object):

@defer.inlineCallbacks
def get_user_by_req(
self,
request,
allow_guest=False,
rights="access",
allow_expired=False,
self, request, allow_guest=False, rights="access", allow_expired=False
):
""" Get a registered user's ID.

@@ -209,9 +195,8 @@ class Auth(object):
try:
ip_addr = self.hs.get_ip_from_request(request)
user_agent = request.requestHeaders.getRawHeaders(
b"User-Agent",
default=[b""]
)[0].decode('ascii', 'surrogateescape')
b"User-Agent", default=[b""]
)[0].decode("ascii", "surrogateescape")

access_token = self.get_access_token_from_request(
request, self.TOKEN_NOT_FOUND_HTTP_STATUS

@@ -243,11 +228,12 @@ class Auth(object):
if self._account_validity.enabled and not allow_expired:
user_id = user.to_string()
expiration_ts = yield self.store.get_expiration_ts_for_user(user_id)
if expiration_ts is not None and self.clock.time_msec() >= expiration_ts:
if (
expiration_ts is not None
and self.clock.time_msec() >= expiration_ts
):
raise AuthError(
403,
"User account has expired",
errcode=Codes.EXPIRED_ACCOUNT,
403, "User account has expired", errcode=Codes.EXPIRED_ACCOUNT
)

# device_id may not be present if get_user_by_access_token has been

@@ -265,18 +251,23 @@ class Auth(object):

if is_guest and not allow_guest:
raise AuthError(
403, "Guest access not allowed", errcode=Codes.GUEST_ACCESS_FORBIDDEN
403,
"Guest access not allowed",
errcode=Codes.GUEST_ACCESS_FORBIDDEN,
)

request.authenticated_entity = user.to_string()

defer.returnValue(synapse.types.create_requester(
user, token_id, is_guest, device_id, app_service=app_service)
defer.returnValue(
synapse.types.create_requester(
user, token_id, is_guest, device_id, app_service=app_service
)
)
except KeyError:
raise AuthError(
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Missing access token.",
errcode=Codes.MISSING_TOKEN
self.TOKEN_NOT_FOUND_HTTP_STATUS,
"Missing access token.",
errcode=Codes.MISSING_TOKEN,
)

@defer.inlineCallbacks

@@ -297,20 +288,14 @@ class Auth(object):
if b"user_id" not in request.args:
defer.returnValue((app_service.sender, app_service))

user_id = request.args[b"user_id"][0].decode('utf8')
user_id = request.args[b"user_id"][0].decode("utf8")
if app_service.sender == user_id:
defer.returnValue((app_service.sender, app_service))

if not app_service.is_interested_in_user(user_id):
raise AuthError(
403,
"Application service cannot masquerade as this user."
)
raise AuthError(403, "Application service cannot masquerade as this user.")
if not (yield self.store.get_user_by_id(user_id)):
raise AuthError(
403,
"Application service has not registered this user"
)
raise AuthError(403, "Application service has not registered this user")
defer.returnValue((user_id, app_service))

@defer.inlineCallbacks

@@ -368,13 +353,13 @@ class Auth(object):
raise AuthError(
self.TOKEN_NOT_FOUND_HTTP_STATUS,
"Unknown user_id %s" % user_id,
errcode=Codes.UNKNOWN_TOKEN
errcode=Codes.UNKNOWN_TOKEN,
)
if not stored_user["is_guest"]:
raise AuthError(
self.TOKEN_NOT_FOUND_HTTP_STATUS,
"Guest access token used for regular user",
errcode=Codes.UNKNOWN_TOKEN
errcode=Codes.UNKNOWN_TOKEN,
)
ret = {
"user": user,

@@ -402,8 +387,9 @@ class Auth(object):
) as e:
logger.warning("Invalid macaroon in auth: %s %s", type(e), e)
raise AuthError(
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Invalid macaroon passed.",
errcode=Codes.UNKNOWN_TOKEN
self.TOKEN_NOT_FOUND_HTTP_STATUS,
"Invalid macaroon passed.",
errcode=Codes.UNKNOWN_TOKEN,
)

def _parse_and_validate_macaroon(self, token, rights="access"):

@@ -441,13 +427,13 @@ class Auth(object):
guest = True

self.validate_macaroon(
macaroon, rights, self.hs.config.expire_access_token,
user_id=user_id,
macaroon, rights, self.hs.config.expire_access_token, user_id=user_id
)
except (pymacaroons.exceptions.MacaroonException, TypeError, ValueError):
raise AuthError(
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Invalid macaroon passed.",
errcode=Codes.UNKNOWN_TOKEN
self.TOKEN_NOT_FOUND_HTTP_STATUS,
"Invalid macaroon passed.",
errcode=Codes.UNKNOWN_TOKEN,
)

if not has_expiry and rights == "access":

@@ -472,10 +458,11 @@ class Auth(object):
user_prefix = "user_id = "
for caveat in macaroon.caveats:
if caveat.caveat_id.startswith(user_prefix):
return caveat.caveat_id[len(user_prefix):]
return caveat.caveat_id[len(user_prefix) :]
raise AuthError(
self.TOKEN_NOT_FOUND_HTTP_STATUS, "No user caveat in macaroon",
errcode=Codes.UNKNOWN_TOKEN
self.TOKEN_NOT_FOUND_HTTP_STATUS,
"No user caveat in macaroon",
errcode=Codes.UNKNOWN_TOKEN,
)

def validate_macaroon(self, macaroon, type_string, verify_expiry, user_id):

@@ -522,7 +509,7 @@ class Auth(object):
prefix = "time < "
if not caveat.startswith(prefix):
return False
expiry = int(caveat[len(prefix):])
expiry = int(caveat[len(prefix) :])
now = self.hs.get_clock().time_msec()
return now < expiry

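The expiry check above parses caveats of the form "time < <ms-since-epoch>"; the same logic in isolation:

caveat = "time < 1561234567890"
prefix = "time < "
if caveat.startswith(prefix):
    expiry = int(caveat[len(prefix):])  # milliseconds since the epoch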
@ -554,14 +541,12 @@ class Auth(object):
|
|||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS,
|
||||
"Unrecognised access token.",
|
||||
errcode=Codes.UNKNOWN_TOKEN
|
||||
errcode=Codes.UNKNOWN_TOKEN,
|
||||
)
|
||||
request.authenticated_entity = service.sender
|
||||
return defer.succeed(service)
|
||||
except KeyError:
|
||||
raise AuthError(
|
||||
self.TOKEN_NOT_FOUND_HTTP_STATUS, "Missing access token."
|
||||
)
|
||||
raise AuthError(self.TOKEN_NOT_FOUND_HTTP_STATUS, "Missing access token.")
|
||||
|
||||
def is_server_admin(self, user):
|
||||
""" Check if the given user is a local server admin.
|
||||
|
@ -581,19 +566,19 @@ class Auth(object):
|
|||
|
||||
auth_ids = []
|
||||
|
||||
key = (EventTypes.PowerLevels, "", )
|
||||
key = (EventTypes.PowerLevels, "")
|
||||
power_level_event_id = current_state_ids.get(key)
|
||||
|
||||
if power_level_event_id:
|
||||
auth_ids.append(power_level_event_id)
|
||||
|
||||
key = (EventTypes.JoinRules, "", )
|
||||
key = (EventTypes.JoinRules, "")
|
||||
join_rule_event_id = current_state_ids.get(key)
|
||||
|
||||
key = (EventTypes.Member, event.sender, )
|
||||
key = (EventTypes.Member, event.sender)
|
||||
member_event_id = current_state_ids.get(key)
|
||||
|
||||
key = (EventTypes.Create, "", )
|
||||
key = (EventTypes.Create, "")
|
||||
create_event_id = current_state_ids.get(key)
|
||||
if create_event_id:
|
||||
auth_ids.append(create_event_id)
|
||||
|
@ -619,7 +604,7 @@ class Auth(object):
|
|||
auth_ids.append(member_event_id)
|
||||
|
||||
if for_verification:
|
||||
key = (EventTypes.Member, event.state_key, )
|
||||
key = (EventTypes.Member, event.state_key)
|
||||
existing_event_id = current_state_ids.get(key)
|
||||
if existing_event_id:
|
||||
auth_ids.append(existing_event_id)
|
||||
|
@ -628,7 +613,7 @@ class Auth(object):
|
|||
if "third_party_invite" in event.content:
|
||||
key = (
|
||||
EventTypes.ThirdPartyInvite,
|
||||
event.content["third_party_invite"]["signed"]["token"]
|
||||
event.content["third_party_invite"]["signed"]["token"],
|
||||
)
|
||||
third_party_invite_id = current_state_ids.get(key)
|
||||
if third_party_invite_id:
|
||||
|
@ -684,7 +669,7 @@ class Auth(object):
|
|||
auth_events[(EventTypes.PowerLevels, "")] = power_level_event
|
||||
|
||||
send_level = event_auth.get_send_level(
|
||||
EventTypes.Aliases, "", power_level_event,
|
||||
EventTypes.Aliases, "", power_level_event
|
||||
)
|
||||
user_level = event_auth.get_user_power_level(user_id, auth_events)
|
||||
|
||||
|
@ -692,7 +677,7 @@ class Auth(object):
|
|||
raise AuthError(
|
||||
403,
|
||||
"This server requires you to be a moderator in the room to"
|
||||
" edit its room list entry"
|
||||
" edit its room list entry",
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
|
@ -742,7 +727,7 @@ class Auth(object):
|
|||
)
|
||||
parts = auth_headers[0].split(b" ")
|
||||
if parts[0] == b"Bearer" and len(parts) == 2:
|
||||
return parts[1].decode('ascii')
|
||||
return parts[1].decode("ascii")
|
||||
else:
|
||||
raise AuthError(
|
||||
token_not_found_http_status,
|
||||
|
@ -755,10 +740,10 @@ class Auth(object):
|
|||
raise AuthError(
|
||||
token_not_found_http_status,
|
||||
"Missing access token.",
|
||||
errcode=Codes.MISSING_TOKEN
|
||||
errcode=Codes.MISSING_TOKEN,
|
||||
)
|
||||
|
||||
return query_params[0].decode('ascii')
|
||||
return query_params[0].decode("ascii")
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def check_in_room_or_world_readable(self, room_id, user_id):
|
||||
|
@ -785,8 +770,8 @@ class Auth(object):
|
|||
room_id, EventTypes.RoomHistoryVisibility, ""
|
||||
)
|
||||
if (
|
||||
visibility and
|
||||
visibility.content["history_visibility"] == "world_readable"
|
||||
visibility
|
||||
and visibility.content["history_visibility"] == "world_readable"
|
||||
):
|
||||
defer.returnValue((Membership.JOIN, None))
|
||||
return
|
||||
|
@ -820,10 +805,11 @@ class Auth(object):
|
|||
|
||||
if self.hs.config.hs_disabled:
|
||||
raise ResourceLimitError(
|
||||
403, self.hs.config.hs_disabled_message,
|
||||
403,
|
||||
self.hs.config.hs_disabled_message,
|
||||
errcode=Codes.RESOURCE_LIMIT_EXCEEDED,
|
||||
admin_contact=self.hs.config.admin_contact,
|
||||
limit_type=self.hs.config.hs_disabled_limit_type
|
||||
limit_type=self.hs.config.hs_disabled_limit_type,
|
||||
)
|
||||
if self.hs.config.limit_usage_by_mau is True:
|
||||
assert not (user_id and threepid)
|
||||
|
@ -848,8 +834,9 @@ class Auth(object):
|
|||
current_mau = yield self.store.get_monthly_active_count()
|
||||
if current_mau >= self.hs.config.max_mau_value:
|
||||
raise ResourceLimitError(
|
||||
403, "Monthly Active User Limit Exceeded",
|
||||
403,
|
||||
"Monthly Active User Limit Exceeded",
|
||||
admin_contact=self.hs.config.admin_contact,
|
||||
errcode=Codes.RESOURCE_LIMIT_EXCEEDED,
|
||||
limit_type="monthly_active_user"
|
||||
limit_type="monthly_active_user",
|
||||
)
|
||||
|
|
|
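The expiry-caveat check reformatted above is easy to exercise in isolation. A minimal sketch of the same parsing logic, with the homeserver clock stubbed out (`now_ms` is an illustrative stand-in for `self.hs.get_clock().time_msec()`, not Synapse's API):

# Standalone sketch of the "time < <ms>" caveat check shown above.
def verify_expiry_caveat(caveat, now_ms):
    prefix = "time < "
    if not caveat.startswith(prefix):
        return False
    expiry = int(caveat[len(prefix) :])  # note Black's spaced slice
    return now_ms < expiry

assert verify_expiry_caveat("time < 2000", now_ms=1000)
assert not verify_expiry_caveat("time < 2000", now_ms=3000)
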
@@ -18,7 +18,7 @@
"""Contains constants from the specification."""

# the "depth" field on events is limited to 2**63 - 1
-MAX_DEPTH = 2**63 - 1
+MAX_DEPTH = 2 ** 63 - 1

# the maximum length for a room alias is 255 characters
MAX_ALIAS_LENGTH = 255

@@ -30,39 +30,41 @@ MAX_USERID_LENGTH = 255
class Membership(object):

"""Represents the membership states of a user in a room."""
-INVITE = u"invite"
-JOIN = u"join"
-KNOCK = u"knock"
-LEAVE = u"leave"
-BAN = u"ban"
+
+INVITE = "invite"
+JOIN = "join"
+KNOCK = "knock"
+LEAVE = "leave"
+BAN = "ban"
LIST = (INVITE, JOIN, KNOCK, LEAVE, BAN)


class PresenceState(object):
"""Represents the presence state of a user."""
-OFFLINE = u"offline"
-UNAVAILABLE = u"unavailable"
-ONLINE = u"online"
+
+OFFLINE = "offline"
+UNAVAILABLE = "unavailable"
+ONLINE = "online"


class JoinRules(object):
-PUBLIC = u"public"
-KNOCK = u"knock"
-INVITE = u"invite"
-PRIVATE = u"private"
+PUBLIC = "public"
+KNOCK = "knock"
+INVITE = "invite"
+PRIVATE = "private"


class LoginType(object):
-PASSWORD = u"m.login.password"
-EMAIL_IDENTITY = u"m.login.email.identity"
-MSISDN = u"m.login.msisdn"
-RECAPTCHA = u"m.login.recaptcha"
-TERMS = u"m.login.terms"
-DUMMY = u"m.login.dummy"
+PASSWORD = "m.login.password"
+EMAIL_IDENTITY = "m.login.email.identity"
+MSISDN = "m.login.msisdn"
+RECAPTCHA = "m.login.recaptcha"
+TERMS = "m.login.terms"
+DUMMY = "m.login.dummy"

# Only for C/S API v1
-APPLICATION_SERVICE = u"m.login.application_service"
-SHARED_SECRET = u"org.matrix.login.shared_secret"
+APPLICATION_SERVICE = "m.login.application_service"
+SHARED_SECRET = "org.matrix.login.shared_secret"


class EventTypes(object):

@@ -118,6 +120,7 @@ class UserTypes(object):
"""Allows for user type specific behaviour. With the benefit of hindsight
'admin' and 'guest' users should also be UserTypes. Normal users are type None
"""
+
SUPPORT = "support"
ALL_USER_TYPES = (SUPPORT,)

@@ -125,6 +128,7 @@ class UserTypes(object):
class RelationTypes(object):
"""The types of relations known to this server.
"""
+
ANNOTATION = "m.annotation"
REPLACE = "m.replace"
REFERENCE = "m.reference"

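The string-prefix and power-operator rewrites above are mechanical: Black drops redundant `u"..."` prefixes and spaces binary operators the same way everywhere. A quick way to reproduce that locally, assuming a Black release of this era (`black.FileMode()` is its default-options object):

import black

# Feed Black a snippet in the old style and compare with the diff above.
old_src = 'INVITE = u"invite"\nMAX_DEPTH = 2**63 - 1\n'
new_src = black.format_str(old_src, mode=black.FileMode())
print(new_src)
# INVITE = "invite"
# MAX_DEPTH = 2 ** 63 - 1
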
@@ -70,6 +70,7 @@ class CodeMessageException(RuntimeError):
code (int): HTTP error code
msg (str): string describing the error
"""
+
def __init__(self, code, msg):
super(CodeMessageException, self).__init__("%d: %s" % (code, msg))
self.code = code

@@ -83,6 +84,7 @@ class SynapseError(CodeMessageException):
Attributes:
errcode (str): Matrix error code e.g 'M_FORBIDDEN'
"""
+
def __init__(self, code, msg, errcode=Codes.UNKNOWN):
"""Constructs a synapse error.

@@ -95,10 +97,7 @@ class SynapseError(CodeMessageException):
self.errcode = errcode

def error_dict(self):
-return cs_error(
-self.msg,
-self.errcode,
-)
+return cs_error(self.msg, self.errcode)


class ProxiedRequestError(SynapseError):

@@ -107,27 +106,23 @@ class ProxiedRequestError(SynapseError):
Attributes:
errcode (str): Matrix error code e.g 'M_FORBIDDEN'
"""
+
def __init__(self, code, msg, errcode=Codes.UNKNOWN, additional_fields=None):
-super(ProxiedRequestError, self).__init__(
-code, msg, errcode
-)
+super(ProxiedRequestError, self).__init__(code, msg, errcode)
if additional_fields is None:
self._additional_fields = {}
else:
self._additional_fields = dict(additional_fields)

def error_dict(self):
-return cs_error(
-self.msg,
-self.errcode,
-**self._additional_fields
-)
+return cs_error(self.msg, self.errcode, **self._additional_fields)


class ConsentNotGivenError(SynapseError):
"""The error returned to the client when the user has not consented to the
privacy policy.
"""
+
def __init__(self, msg, consent_uri):
"""Constructs a ConsentNotGivenError

@@ -136,22 +131,17 @@ class ConsentNotGivenError(SynapseError):
consent_url (str): The URL where the user can give their consent
"""
super(ConsentNotGivenError, self).__init__(
-code=http_client.FORBIDDEN,
-msg=msg,
-errcode=Codes.CONSENT_NOT_GIVEN
+code=http_client.FORBIDDEN, msg=msg, errcode=Codes.CONSENT_NOT_GIVEN
)
self._consent_uri = consent_uri

def error_dict(self):
-return cs_error(
-self.msg,
-self.errcode,
-consent_uri=self._consent_uri
-)
+return cs_error(self.msg, self.errcode, consent_uri=self._consent_uri)


class RegistrationError(SynapseError):
"""An error raised when a registration event fails."""
+
pass


@@ -190,15 +180,17 @@ class InteractiveAuthIncompleteError(Exception):
result (dict): the server response to the request, which should be
passed back to the client
"""
+
def __init__(self, result):
super(InteractiveAuthIncompleteError, self).__init__(
-"Interactive auth not yet complete",
+"Interactive auth not yet complete"
)
self.result = result


class UnrecognizedRequestError(SynapseError):
"""An error indicating we don't understand the request you're trying to make"""
+
def __init__(self, *args, **kwargs):
if "errcode" not in kwargs:
kwargs["errcode"] = Codes.UNRECOGNIZED

@@ -207,21 +199,14 @@ class UnrecognizedRequestError(SynapseError):
message = "Unrecognized request"
else:
message = args[0]
-super(UnrecognizedRequestError, self).__init__(
-400,
-message,
-**kwargs
-)
+super(UnrecognizedRequestError, self).__init__(400, message, **kwargs)


class NotFoundError(SynapseError):
"""An error indicating we can't find the thing you asked for"""
+
def __init__(self, msg="Not found", errcode=Codes.NOT_FOUND):
-super(NotFoundError, self).__init__(
-404,
-msg,
-errcode=errcode
-)
+super(NotFoundError, self).__init__(404, msg, errcode=errcode)


class AuthError(SynapseError):

@@ -238,8 +223,11 @@ class ResourceLimitError(SynapseError):
Any error raised when there is a problem with resource usage.
For instance, the monthly active user limit for the server has been exceeded
"""
+
def __init__(
-self, code, msg,
+self,
+code,
+msg,
errcode=Codes.RESOURCE_LIMIT_EXCEEDED,
admin_contact=None,
limit_type=None,

@@ -253,7 +241,7 @@ class ResourceLimitError(SynapseError):
self.msg,
self.errcode,
admin_contact=self.admin_contact,
-limit_type=self.limit_type
+limit_type=self.limit_type,
)


@@ -268,6 +256,7 @@ class EventSizeError(SynapseError):

class EventStreamError(SynapseError):
"""An error raised when there a problem with the event stream."""
+
def __init__(self, *args, **kwargs):
if "errcode" not in kwargs:
kwargs["errcode"] = Codes.BAD_PAGINATION

@@ -276,47 +265,53 @@ class EventStreamError(SynapseError):

class LoginError(SynapseError):
"""An error raised when there was a problem logging in."""
+
pass


class StoreError(SynapseError):
"""An error raised when there was a problem storing some data."""
+
pass


class InvalidCaptchaError(SynapseError):
-def __init__(self, code=400, msg="Invalid captcha.", error_url=None,
-errcode=Codes.CAPTCHA_INVALID):
+def __init__(
+self,
+code=400,
+msg="Invalid captcha.",
+error_url=None,
+errcode=Codes.CAPTCHA_INVALID,
+):
super(InvalidCaptchaError, self).__init__(code, msg, errcode)
self.error_url = error_url

def error_dict(self):
-return cs_error(
-self.msg,
-self.errcode,
-error_url=self.error_url,
-)
+return cs_error(self.msg, self.errcode, error_url=self.error_url)


class LimitExceededError(SynapseError):
"""A client has sent too many requests and is being throttled.
"""
-def __init__(self, code=429, msg="Too Many Requests", retry_after_ms=None,
-errcode=Codes.LIMIT_EXCEEDED):
+
+def __init__(
+self,
+code=429,
+msg="Too Many Requests",
+retry_after_ms=None,
+errcode=Codes.LIMIT_EXCEEDED,
+):
super(LimitExceededError, self).__init__(code, msg, errcode)
self.retry_after_ms = retry_after_ms

def error_dict(self):
-return cs_error(
-self.msg,
-self.errcode,
-retry_after_ms=self.retry_after_ms,
-)
+return cs_error(self.msg, self.errcode, retry_after_ms=self.retry_after_ms)


class RoomKeysVersionError(SynapseError):
"""A client has tried to upload to a non-current version of the room_keys store
"""
+
def __init__(self, current_version):
"""
Args:

@@ -331,6 +326,7 @@ class RoomKeysVersionError(SynapseError):
class UnsupportedRoomVersionError(SynapseError):
"""The client's request to create a room used a room version that the server does
not support."""
+
def __init__(self):
super(UnsupportedRoomVersionError, self).__init__(
code=400,

@@ -354,22 +350,19 @@ class IncompatibleRoomVersionError(SynapseError):
Unlike UnsupportedRoomVersionError, it is specific to the case of the make_join
failing.
"""
+
def __init__(self, room_version):
super(IncompatibleRoomVersionError, self).__init__(
code=400,
msg="Your homeserver does not support the features required to "
-"join this room",
+"join this room",
errcode=Codes.INCOMPATIBLE_ROOM_VERSION,
)

self._room_version = room_version

def error_dict(self):
-return cs_error(
-self.msg,
-self.errcode,
-room_version=self._room_version,
-)
+return cs_error(self.msg, self.errcode, room_version=self._room_version)


class RequestSendFailed(RuntimeError):

@@ -380,11 +373,11 @@ class RequestSendFailed(RuntimeError):
networking (e.g. DNS failures, connection timeouts etc), versus unexpected
errors (like programming errors).
"""
+
def __init__(self, inner_exception, can_retry):
super(RequestSendFailed, self).__init__(
-"Failed to send request: %s: %s" % (
-type(inner_exception).__name__, inner_exception,
-)
+"Failed to send request: %s: %s"
+% (type(inner_exception).__name__, inner_exception)
)
self.inner_exception = inner_exception
self.can_retry = can_retry

@@ -428,7 +421,7 @@ class FederationError(RuntimeError):
self.affected = affected
self.source = source

-msg = "%s %s: %s" % (level, code, reason,)
+msg = "%s %s: %s" % (level, code, reason)
super(FederationError, self).__init__(msg)

def get_dict(self):

@@ -448,6 +441,7 @@ class HttpResponseException(CodeMessageException):
Attributes:
response (bytes): body of response
"""
+
def __init__(self, code, msg, response):
"""

@@ -486,7 +480,7 @@ class HttpResponseException(CodeMessageException):
if not isinstance(j, dict):
j = {}

-errcode = j.pop('errcode', Codes.UNKNOWN)
-errmsg = j.pop('error', self.msg)
+errcode = j.pop("errcode", Codes.UNKNOWN)
+errmsg = j.pop("error", self.msg)

return ProxiedRequestError(self.code, errmsg, errcode, j)

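All of these `error_dict` bodies collapse onto one line because `cs_error` already does the assembly. A hedged sketch of the pattern (this `cs_error` is a stand-in with the same shape as Synapse's helper, not the real import):

# Stand-in for the cs_error helper: build a Matrix error body from a
# message, an errcode, and any extra fields.
def cs_error(msg, code="M_UNKNOWN", **kwargs):
    err = {"error": msg, "errcode": code}
    err.update(kwargs)
    return err

class LimitExceededError(Exception):
    def __init__(self, retry_after_ms=None):
        self.msg = "Too Many Requests"
        self.errcode = "M_LIMIT_EXCEEDED"
        self.retry_after_ms = retry_after_ms

    def error_dict(self):
        return cs_error(self.msg, self.errcode, retry_after_ms=self.retry_after_ms)

print(LimitExceededError(retry_after_ms=500).error_dict())
# {'error': 'Too Many Requests', 'errcode': 'M_LIMIT_EXCEEDED', 'retry_after_ms': 500}
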
@@ -28,117 +28,55 @@ FILTER_SCHEMA = {
"additionalProperties": False,
"type": "object",
"properties": {
-"limit": {
-"type": "number"
-},
-"senders": {
-"$ref": "#/definitions/user_id_array"
-},
-"not_senders": {
-"$ref": "#/definitions/user_id_array"
-},
+"limit": {"type": "number"},
+"senders": {"$ref": "#/definitions/user_id_array"},
+"not_senders": {"$ref": "#/definitions/user_id_array"},
# TODO: We don't limit event type values but we probably should...
# check types are valid event types
-"types": {
-"type": "array",
-"items": {
-"type": "string"
-}
-},
-"not_types": {
-"type": "array",
-"items": {
-"type": "string"
-}
-}
-}
+"types": {"type": "array", "items": {"type": "string"}},
+"not_types": {"type": "array", "items": {"type": "string"}},
+},
}

ROOM_FILTER_SCHEMA = {
"additionalProperties": False,
"type": "object",
"properties": {
-"not_rooms": {
-"$ref": "#/definitions/room_id_array"
-},
-"rooms": {
-"$ref": "#/definitions/room_id_array"
-},
-"ephemeral": {
-"$ref": "#/definitions/room_event_filter"
-},
-"include_leave": {
-"type": "boolean"
-},
-"state": {
-"$ref": "#/definitions/room_event_filter"
-},
-"timeline": {
-"$ref": "#/definitions/room_event_filter"
-},
-"account_data": {
-"$ref": "#/definitions/room_event_filter"
-},
-}
+"not_rooms": {"$ref": "#/definitions/room_id_array"},
+"rooms": {"$ref": "#/definitions/room_id_array"},
+"ephemeral": {"$ref": "#/definitions/room_event_filter"},
+"include_leave": {"type": "boolean"},
+"state": {"$ref": "#/definitions/room_event_filter"},
+"timeline": {"$ref": "#/definitions/room_event_filter"},
+"account_data": {"$ref": "#/definitions/room_event_filter"},
+},
}

ROOM_EVENT_FILTER_SCHEMA = {
"additionalProperties": False,
"type": "object",
"properties": {
-"limit": {
-"type": "number"
-},
-"senders": {
-"$ref": "#/definitions/user_id_array"
-},
-"not_senders": {
-"$ref": "#/definitions/user_id_array"
-},
-"types": {
-"type": "array",
-"items": {
-"type": "string"
-}
-},
-"not_types": {
-"type": "array",
-"items": {
-"type": "string"
-}
-},
-"rooms": {
-"$ref": "#/definitions/room_id_array"
-},
-"not_rooms": {
-"$ref": "#/definitions/room_id_array"
-},
-"contains_url": {
-"type": "boolean"
-},
-"lazy_load_members": {
-"type": "boolean"
-},
-"include_redundant_members": {
-"type": "boolean"
-},
-}
+"limit": {"type": "number"},
+"senders": {"$ref": "#/definitions/user_id_array"},
+"not_senders": {"$ref": "#/definitions/user_id_array"},
+"types": {"type": "array", "items": {"type": "string"}},
+"not_types": {"type": "array", "items": {"type": "string"}},
+"rooms": {"$ref": "#/definitions/room_id_array"},
+"not_rooms": {"$ref": "#/definitions/room_id_array"},
+"contains_url": {"type": "boolean"},
+"lazy_load_members": {"type": "boolean"},
+"include_redundant_members": {"type": "boolean"},
+},
}

USER_ID_ARRAY_SCHEMA = {
"type": "array",
-"items": {
-"type": "string",
-"format": "matrix_user_id"
-}
+"items": {"type": "string", "format": "matrix_user_id"},
}

ROOM_ID_ARRAY_SCHEMA = {
"type": "array",
-"items": {
-"type": "string",
-"format": "matrix_room_id"
-}
+"items": {"type": "string", "format": "matrix_room_id"},
}

USER_FILTER_SCHEMA = {

@@ -150,22 +88,13 @@ USER_FILTER_SCHEMA = {
"user_id_array": USER_ID_ARRAY_SCHEMA,
"filter": FILTER_SCHEMA,
"room_filter": ROOM_FILTER_SCHEMA,
-"room_event_filter": ROOM_EVENT_FILTER_SCHEMA
+"room_event_filter": ROOM_EVENT_FILTER_SCHEMA,
},
"properties": {
-"presence": {
-"$ref": "#/definitions/filter"
-},
-"account_data": {
-"$ref": "#/definitions/filter"
-},
-"room": {
-"$ref": "#/definitions/room_filter"
-},
-"event_format": {
-"type": "string",
-"enum": ["client", "federation"]
-},
+"presence": {"$ref": "#/definitions/filter"},
+"account_data": {"$ref": "#/definitions/filter"},
+"room": {"$ref": "#/definitions/room_filter"},
+"event_format": {"type": "string", "enum": ["client", "federation"]},
"event_fields": {
"type": "array",
"items": {

@@ -177,26 +106,25 @@ USER_FILTER_SCHEMA = {
#
# Note that because this is a regular expression, we have to escape
# each backslash in the pattern.
-"pattern": r"^((?!\\\\).)*$"
-}
-}
+"pattern": r"^((?!\\\\).)*$",
+},
+},
},
-"additionalProperties": False
+"additionalProperties": False,
}


-@FormatChecker.cls_checks('matrix_room_id')
+@FormatChecker.cls_checks("matrix_room_id")
def matrix_room_id_validator(room_id_str):
return RoomID.from_string(room_id_str)


-@FormatChecker.cls_checks('matrix_user_id')
+@FormatChecker.cls_checks("matrix_user_id")
def matrix_user_id_validator(user_id_str):
return UserID.from_string(user_id_str)


class Filtering(object):

def __init__(self, hs):
super(Filtering, self).__init__()
self.store = hs.get_datastore()

@@ -228,8 +156,9 @@ class Filtering(object):
# individual top-level key e.g. public_user_data. Filters are made of
# many definitions.
try:
-jsonschema.validate(user_filter_json, USER_FILTER_SCHEMA,
-format_checker=FormatChecker())
+jsonschema.validate(
+user_filter_json, USER_FILTER_SCHEMA, format_checker=FormatChecker()
+)
except jsonschema.ValidationError as e:
raise SynapseError(400, str(e))

@@ -240,10 +169,9 @@ class FilterCollection(object):

room_filter_json = self._filter_json.get("room", {})

-self._room_filter = Filter({
-k: v for k, v in room_filter_json.items()
-if k in ("rooms", "not_rooms")
-})
+self._room_filter = Filter(
+{k: v for k, v in room_filter_json.items() if k in ("rooms", "not_rooms")}
+)

self._room_timeline_filter = Filter(room_filter_json.get("timeline", {}))
self._room_state_filter = Filter(room_filter_json.get("state", {}))

@@ -252,9 +180,7 @@ class FilterCollection(object):
self._presence_filter = Filter(filter_json.get("presence", {}))
self._account_data = Filter(filter_json.get("account_data", {}))

-self.include_leave = filter_json.get("room", {}).get(
-"include_leave", False
-)
+self.include_leave = filter_json.get("room", {}).get("include_leave", False)
self.event_fields = filter_json.get("event_fields", [])
self.event_format = filter_json.get("event_format", "client")

@@ -299,22 +225,22 @@ class FilterCollection(object):

def blocks_all_presence(self):
return (
-self._presence_filter.filters_all_types() or
-self._presence_filter.filters_all_senders()
+self._presence_filter.filters_all_types()
+or self._presence_filter.filters_all_senders()
)

def blocks_all_room_ephemeral(self):
return (
-self._room_ephemeral_filter.filters_all_types() or
-self._room_ephemeral_filter.filters_all_senders() or
-self._room_ephemeral_filter.filters_all_rooms()
+self._room_ephemeral_filter.filters_all_types()
+or self._room_ephemeral_filter.filters_all_senders()
+or self._room_ephemeral_filter.filters_all_rooms()
)

def blocks_all_room_timeline(self):
return (
-self._room_timeline_filter.filters_all_types() or
-self._room_timeline_filter.filters_all_senders() or
-self._room_timeline_filter.filters_all_rooms()
+self._room_timeline_filter.filters_all_types()
+or self._room_timeline_filter.filters_all_senders()
+or self._room_timeline_filter.filters_all_rooms()
)


@@ -375,12 +301,7 @@ class Filter(object):
# check if there is a string url field in the content for filtering purposes
contains_url = isinstance(content.get("url"), text_type)

-return self.check_fields(
-room_id,
-sender,
-ev_type,
-contains_url,
-)
+return self.check_fields(room_id, sender, ev_type, contains_url)

def check_fields(self, room_id, sender, event_type, contains_url):
"""Checks whether the filter matches the given event fields.

@@ -391,7 +312,7 @@ class Filter(object):
literal_keys = {
"rooms": lambda v: room_id == v,
"senders": lambda v: sender == v,
-"types": lambda v: _matches_wildcard(event_type, v)
+"types": lambda v: _matches_wildcard(event_type, v),
}

for name, match_func in literal_keys.items():

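The `"types"` entry above delegates to a wildcard matcher. A minimal sketch of how such a matcher can behave (this `_matches_wildcard` is reconstructed for illustration; treat the trailing-`*` prefix rule as an assumption about the helper, not a verbatim copy):

def _matches_wildcard(actual_value, filter_value):
    # "m.room.*" matches any type with that prefix; otherwise exact match.
    if filter_value.endswith("*"):
        type_prefix = filter_value[:-1]
        return actual_value.startswith(type_prefix)
    return actual_value == filter_value

assert _matches_wildcard("m.room.message", "m.room.*")
assert not _matches_wildcard("m.presence", "m.room.*")
assert _matches_wildcard("m.room.member", "m.room.member")
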
@@ -44,29 +44,25 @@ class Ratelimiter(object):
"""
self.prune_message_counts(time_now_s)
message_count, time_start, _ignored = self.message_counts.get(
-key, (0., time_now_s, None),
+key, (0.0, time_now_s, None)
)
time_delta = time_now_s - time_start
sent_count = message_count - time_delta * rate_hz
if sent_count < 0:
allowed = True
time_start = time_now_s
-message_count = 1.
-elif sent_count > burst_count - 1.:
+message_count = 1.0
+elif sent_count > burst_count - 1.0:
allowed = False
else:
allowed = True
message_count += 1

if update:
-self.message_counts[key] = (
-message_count, time_start, rate_hz
-)
+self.message_counts[key] = (message_count, time_start, rate_hz)

if rate_hz > 0:
-time_allowed = (
-time_start + (message_count - burst_count + 1) / rate_hz
-)
+time_allowed = time_start + (message_count - burst_count + 1) / rate_hz
if time_allowed < time_now_s:
time_allowed = time_now_s
else:

@@ -76,9 +72,7 @@ class Ratelimiter(object):

def prune_message_counts(self, time_now_s):
for key in list(self.message_counts.keys()):
-message_count, time_start, rate_hz = (
-self.message_counts[key]
-)
+message_count, time_start, rate_hz = self.message_counts[key]
time_delta = time_now_s - time_start
if message_count - time_delta * rate_hz > 0:
break

@@ -92,5 +86,5 @@ class Ratelimiter(object):
if not allowed:
raise LimitExceededError(
-retry_after_ms=int(1000 * (time_allowed - time_now_s)),
+retry_after_ms=int(1000 * (time_allowed - time_now_s))
)

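The arithmetic in this hunk is a leaky bucket: the stored count decays at `rate_hz` per second from `time_start`, and an action is refused once the decayed count would exceed `burst_count`. A self-contained sketch of the same computation (a sketch of the logic, not the `Ratelimiter` API):

# Leaky-bucket check mirroring the arithmetic above.
def can_do_action(message_count, time_start, time_now_s, rate_hz, burst_count):
    time_delta = time_now_s - time_start
    sent_count = message_count - time_delta * rate_hz  # decayed usage
    if sent_count < 0:
        return True, 1.0, time_now_s  # bucket fully drained: reset
    if sent_count > burst_count - 1.0:
        return False, message_count, time_start  # burst exhausted
    return True, message_count + 1, time_start

# 10 actions at t=0 with rate 1/s and burst 10: the 11th at t=0 is refused,
# but by t=5 five "slots" have leaked away and the action is allowed again.
allowed, _, _ = can_do_action(10.0, 0.0, 0.0, rate_hz=1.0, burst_count=10.0)
assert not allowed
allowed, _, _ = can_do_action(10.0, 0.0, 5.0, rate_hz=1.0, burst_count=10.0)
assert allowed
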
@@ -19,9 +19,10 @@ class EventFormatVersions(object):
"""This is an internal enum for tracking the version of the event format,
independently from the room version.
"""
-V1 = 1 # $id:server event id format
-V2 = 2 # MSC1659-style $hash event id format: introduced for room v3
-V3 = 3 # MSC1884-style $hash format: introduced for room v4
+
+V1 = 1  # $id:server event id format
+V2 = 2  # MSC1659-style $hash event id format: introduced for room v3
+V3 = 3  # MSC1884-style $hash format: introduced for room v4


KNOWN_EVENT_FORMAT_VERSIONS = {

@@ -33,8 +34,9 @@ KNOWN_EVENT_FORMAT_VERSIONS = {

class StateResolutionVersions(object):
"""Enum to identify the state resolution algorithms"""
-V1 = 1 # room v1 state res
-V2 = 2 # MSC1442 state res: room v2 and later
+
+V1 = 1  # room v1 state res
+V2 = 2  # MSC1442 state res: room v2 and later


class RoomDisposition(object):

@@ -46,10 +48,10 @@ class RoomDisposition(object):
class RoomVersion(object):
"""An object which describes the unique attributes of a room version."""

-identifier = attr.ib() # str; the identifier for this version
-disposition = attr.ib() # str; one of the RoomDispositions
-event_format = attr.ib() # int; one of the EventFormatVersions
-state_res = attr.ib() # int; one of the StateResolutionVersions
+identifier = attr.ib()  # str; the identifier for this version
+disposition = attr.ib()  # str; one of the RoomDispositions
+event_format = attr.ib()  # int; one of the EventFormatVersions
+state_res = attr.ib()  # int; one of the StateResolutionVersions
enforce_key_validity = attr.ib()  # bool


@@ -92,11 +94,12 @@ class RoomVersions(object):

KNOWN_ROOM_VERSIONS = {
-v.identifier: v for v in (
+v.identifier: v
+for v in (
RoomVersions.V1,
RoomVersions.V2,
RoomVersions.V3,
RoomVersions.V4,
RoomVersions.V5,
)
-} # type: dict[str, RoomVersion]
+}  # type: dict[str, RoomVersion]

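`RoomVersion` is a plain attrs container, and `KNOWN_ROOM_VERSIONS` is just a dict comprehension keyed on `identifier`. A hedged miniature of the same pattern (field values here are invented for illustration, not real room versions):

import attr

@attr.s
class RoomVersion(object):
    identifier = attr.ib()    # str; the identifier for this version
    event_format = attr.ib()  # int; one of the EventFormatVersions

# Hypothetical versions, purely for illustration.
_VERSIONS = (RoomVersion("1", 1), RoomVersion("3", 2))
KNOWN = {v.identifier: v for v in _VERSIONS}

assert KNOWN["3"].event_format == 2
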
@@ -42,13 +42,9 @@ class ConsentURIBuilder(object):
hs_config (synapse.config.homeserver.HomeServerConfig):
"""
if hs_config.form_secret is None:
-raise ConfigError(
-"form_secret not set in config",
-)
+raise ConfigError("form_secret not set in config")
if hs_config.public_baseurl is None:
-raise ConfigError(
-"public_baseurl not set in config",
-)
+raise ConfigError("public_baseurl not set in config")

self._hmac_secret = hs_config.form_secret.encode("utf-8")
self._public_baseurl = hs_config.public_baseurl

@@ -64,15 +60,10 @@ class ConsentURIBuilder(object):
(str) the URI where the user can do consent
"""
mac = hmac.new(
-key=self._hmac_secret,
-msg=user_id.encode('ascii'),
-digestmod=sha256,
+key=self._hmac_secret, msg=user_id.encode("ascii"), digestmod=sha256
).hexdigest()
consent_uri = "%s_matrix/consent?%s" % (
self._public_baseurl,
-urlencode({
-"u": user_id,
-"h": mac
-}),
+urlencode({"u": user_id, "h": mac}),
)
return consent_uri

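The consent URI above is just an HMAC over the user ID, appended as a query parameter. The same construction, runnable standalone with stub inputs (the secret and base URL are placeholders; the format string assumes a trailing slash on the base URL):

import hmac
from hashlib import sha256
from urllib.parse import urlencode  # Python 3 spelling of the import used above

form_secret = b"not-a-real-secret"       # placeholder HMAC key
public_baseurl = "https://hs.example/"   # placeholder, trailing slash assumed
user_id = "@alice:hs.example"

mac = hmac.new(key=form_secret, msg=user_id.encode("ascii"), digestmod=sha256).hexdigest()
consent_uri = "%s_matrix/consent?%s" % (public_baseurl, urlencode({"u": user_id, "h": mac}))
print(consent_uri)
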
@@ -43,7 +43,7 @@ def check_bind_error(e, address, bind_addresses):
address (str): Address on which binding was attempted.
bind_addresses (list): Addresses on which the service listens.
"""
-if address == '0.0.0.0' and '::' in bind_addresses:
-logger.warn('Failed to listen on 0.0.0.0, continuing because listening on [::]')
+if address == "0.0.0.0" and "::" in bind_addresses:
+logger.warn("Failed to listen on 0.0.0.0, continuing because listening on [::]")
else:
raise e

@@ -75,14 +75,14 @@ def start_worker_reactor(appname, config):

def start_reactor(
-appname,
-soft_file_limit,
-gc_thresholds,
-pid_file,
-daemonize,
-cpu_affinity,
-print_pidfile,
-logger,
+appname,
+soft_file_limit,
+gc_thresholds,
+pid_file,
+daemonize,
+cpu_affinity,
+print_pidfile,
+logger,
):
""" Run the reactor in the main process

@@ -149,10 +149,10 @@ def start_reactor(
def quit_with_error(error_string):
message_lines = error_string.split("\n")
line_length = max([len(l) for l in message_lines if len(l) < 80]) + 2
-sys.stderr.write("*" * line_length + '\n')
+sys.stderr.write("*" * line_length + "\n")
for line in message_lines:
sys.stderr.write(" %s\n" % (line.rstrip(),))
-sys.stderr.write("*" * line_length + '\n')
+sys.stderr.write("*" * line_length + "\n")
sys.exit(1)


@@ -178,14 +178,7 @@ def listen_tcp(bind_addresses, port, factory, reactor=reactor, backlog=50):
r = []
for address in bind_addresses:
try:
-r.append(
-reactor.listenTCP(
-port,
-factory,
-backlog,
-address
-)
-)
+r.append(reactor.listenTCP(port, factory, backlog, address))
except error.CannotListenError as e:
check_bind_error(e, address, bind_addresses)

@@ -205,13 +198,7 @@ def listen_ssl(
for address in bind_addresses:
try:
r.append(
-reactor.listenSSL(
-port,
-factory,
-context_factory,
-backlog,
-address
-)
+reactor.listenSSL(port, factory, context_factory, backlog, address)
)
except error.CannotListenError as e:
check_bind_error(e, address, bind_addresses)

@@ -243,15 +230,13 @@ def refresh_certificate(hs):
if isinstance(i.factory, TLSMemoryBIOFactory):
addr = i.getHost()
logger.info(
-"Replacing TLS context factory on [%s]:%i", addr.host, addr.port,
+"Replacing TLS context factory on [%s]:%i", addr.host, addr.port
)
# We want to replace TLS factories with a new one, with the new
# TLS configuration. We do this by reaching in and pulling out
# the wrappedFactory, and then re-wrapping it.
i.factory = TLSMemoryBIOFactory(
-hs.tls_server_context_factory,
-False,
-i.factory.wrappedFactory
+hs.tls_server_context_factory, False, i.factory.wrappedFactory
)
logger.info("Context factories updated.")

@@ -267,6 +252,7 @@ def start(hs, listeners=None):
try:
# Set up the SIGHUP machinery.
if hasattr(signal, "SIGHUP"):
+
def handle_sighup(*args, **kwargs):
for i in _sighup_callbacks:
i(hs)

@@ -302,10 +288,8 @@ def setup_sentry(hs):
return

import sentry_sdk
-sentry_sdk.init(
-dsn=hs.config.sentry_dsn,
-release=get_version_string(synapse),
-)
+
+sentry_sdk.init(dsn=hs.config.sentry_dsn, release=get_version_string(synapse))

# We set some default tags that give some context to this instance
with sentry_sdk.configure_scope() as scope:

@@ -326,7 +310,7 @@ def install_dns_limiter(reactor, max_dns_requests_in_flight=100):
many DNS queries at once
"""
new_resolver = _LimitedHostnameResolver(
-reactor.nameResolver, max_dns_requests_in_flight,
+reactor.nameResolver, max_dns_requests_in_flight
)

reactor.installNameResolver(new_resolver)

@@ -339,11 +323,17 @@ class _LimitedHostnameResolver(object):
def __init__(self, resolver, max_dns_requests_in_flight):
self._resolver = resolver
self._limiter = Linearizer(
-name="dns_client_limiter", max_count=max_dns_requests_in_flight,
+name="dns_client_limiter", max_count=max_dns_requests_in_flight
)

-def resolveHostName(self, resolutionReceiver, hostName, portNumber=0,
-addressTypes=None, transportSemantics='TCP'):
+def resolveHostName(
+self,
+resolutionReceiver,
+hostName,
+portNumber=0,
+addressTypes=None,
+transportSemantics="TCP",
+):
# We need this function to return `resolutionReceiver` so we do all the
# actual logic involving deferreds in a separate function.

@@ -363,8 +353,14 @@ class _LimitedHostnameResolver(object):
return resolutionReceiver

@defer.inlineCallbacks
-def _resolve(self, resolutionReceiver, hostName, portNumber=0,
-addressTypes=None, transportSemantics='TCP'):
+def _resolve(
+self,
+resolutionReceiver,
+hostName,
+portNumber=0,
+addressTypes=None,
+transportSemantics="TCP",
+):

with (yield self._limiter.queue(())):
# resolveHostName doesn't return a Deferred, so we need to hook into

@@ -374,8 +370,7 @@ class _LimitedHostnameResolver(object):
receiver = _DeferredResolutionReceiver(resolutionReceiver, deferred)

self._resolver.resolveHostName(
-receiver, hostName, portNumber,
-addressTypes, transportSemantics,
+receiver, hostName, portNumber, addressTypes, transportSemantics
)

yield deferred

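`quit_with_error` above draws a box of asterisks sized to the longest sub-80-column line. The same logic, runnable standalone with the `sys.exit` call left out so it can be tried safely:

import sys

def print_error_box(error_string):
    # Width follows the longest line under 80 chars, plus padding,
    # exactly as in quit_with_error above (minus the sys.exit call).
    message_lines = error_string.split("\n")
    line_length = max([len(l) for l in message_lines if len(l) < 80]) + 2
    sys.stderr.write("*" * line_length + "\n")
    for line in message_lines:
        sys.stderr.write(" %s\n" % (line.rstrip(),))
    sys.stderr.write("*" * line_length + "\n")

print_error_box("Config error:\nmissing server_name")
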
@@ -44,7 +44,9 @@ logger = logging.getLogger("synapse.app.appservice")


class AppserviceSlaveStore(
-DirectoryStore, SlavedEventStore, SlavedApplicationServiceStore,
+DirectoryStore,
+SlavedEventStore,
+SlavedApplicationServiceStore,
SlavedRegistrationStore,
):
pass

@@ -74,7 +76,7 @@ class AppserviceServer(HomeServer):
listener_config,
root_resource,
self.version_string,
-)
+),
)

logger.info("Synapse appservice now listening on port %d", port)

@@ -88,18 +90,19 @@ class AppserviceServer(HomeServer):
listener["bind_addresses"],
listener["port"],
manhole(
-username="matrix",
-password="rabbithole",
-globals={"hs": self},
-)
+username="matrix", password="rabbithole", globals={"hs": self}
+),
)
elif listener["type"] == "metrics":
if not self.get_config().enable_metrics:
-logger.warn(("Metrics listener configured, but "
-"enable_metrics is not True!"))
+logger.warn(
+(
+"Metrics listener configured, but "
+"enable_metrics is not True!"
+)
+)
else:
-_base.listen_metrics(listener["bind_addresses"],
-listener["port"])
+_base.listen_metrics(listener["bind_addresses"], listener["port"])
else:
logger.warn("Unrecognized listener type: %s", listener["type"])

@@ -132,9 +135,7 @@ class ASReplicationHandler(ReplicationClientHandler):

def start(config_options):
try:
-config = HomeServerConfig.load_config(
-"Synapse appservice", config_options
-)
+config = HomeServerConfig.load_config("Synapse appservice", config_options)
except ConfigError as e:
sys.stderr.write("\n" + str(e) + "\n")
sys.exit(1)

@@ -173,6 +174,6 @@ def start(config_options):
_base.start_worker_reactor("synapse-appservice", config)


-if __name__ == '__main__':
+if __name__ == "__main__":
with LoggingContext("main"):
start(sys.argv[1:])

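The manhole/metrics listener dispatch above repeats, nearly verbatim, in every worker entry point in this commit. Its shape reduces to plain Python like this (`open_manhole` and `open_metrics` are placeholders for the `manhole()`/`listen_metrics()` helpers, not real imports):

import logging

logger = logging.getLogger(__name__)

def start_listeners(listeners, enable_metrics, open_manhole, open_metrics):
    # Dispatch each configured listener by its "type", as the workers do.
    for listener in listeners:
        if listener["type"] == "manhole":
            open_manhole(listener["bind_addresses"], listener["port"])
        elif listener["type"] == "metrics":
            if not enable_metrics:
                logger.warning(
                    "Metrics listener configured, but enable_metrics is not True!"
                )
            else:
                open_metrics(listener["bind_addresses"], listener["port"])
        else:
            logger.warning("Unrecognized listener type: %s", listener["type"])
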
@@ -118,9 +118,7 @@ class ClientReaderServer(HomeServer):
PushRuleRestServlet(self).register(resource)
VersionsRestServlet().register(resource)

-resources.update({
-"/_matrix/client": resource,
-})
+resources.update({"/_matrix/client": resource})

root_resource = create_resource_tree(resources, NoResource())

@@ -133,7 +131,7 @@ class ClientReaderServer(HomeServer):
listener_config,
root_resource,
self.version_string,
-)
+),
)

logger.info("Synapse client reader now listening on port %d", port)

@@ -147,18 +145,19 @@ class ClientReaderServer(HomeServer):
listener["bind_addresses"],
listener["port"],
manhole(
-username="matrix",
-password="rabbithole",
-globals={"hs": self},
-)
+username="matrix", password="rabbithole", globals={"hs": self}
+),
)
elif listener["type"] == "metrics":
if not self.get_config().enable_metrics:
-logger.warn(("Metrics listener configured, but "
-"enable_metrics is not True!"))
+logger.warn(
+(
+"Metrics listener configured, but "
+"enable_metrics is not True!"
+)
+)
else:
-_base.listen_metrics(listener["bind_addresses"],
-listener["port"])
+_base.listen_metrics(listener["bind_addresses"], listener["port"])
else:
logger.warn("Unrecognized listener type: %s", listener["type"])

@@ -170,9 +169,7 @@ class ClientReaderServer(HomeServer):

def start(config_options):
try:
-config = HomeServerConfig.load_config(
-"Synapse client reader", config_options
-)
+config = HomeServerConfig.load_config("Synapse client reader", config_options)
except ConfigError as e:
sys.stderr.write("\n" + str(e) + "\n")
sys.exit(1)

@@ -199,6 +196,6 @@ def start(config_options):
_base.start_worker_reactor("synapse-client-reader", config)


-if __name__ == '__main__':
+if __name__ == "__main__":
with LoggingContext("main"):
start(sys.argv[1:])

@@ -109,12 +109,14 @@ class EventCreatorServer(HomeServer):
ProfileAvatarURLRestServlet(self).register(resource)
ProfileDisplaynameRestServlet(self).register(resource)
ProfileRestServlet(self).register(resource)
-resources.update({
-"/_matrix/client/r0": resource,
-"/_matrix/client/unstable": resource,
-"/_matrix/client/v2_alpha": resource,
-"/_matrix/client/api/v1": resource,
-})
+resources.update(
+{
+"/_matrix/client/r0": resource,
+"/_matrix/client/unstable": resource,
+"/_matrix/client/v2_alpha": resource,
+"/_matrix/client/api/v1": resource,
+}
+)

root_resource = create_resource_tree(resources, NoResource())

@@ -127,7 +129,7 @@ class EventCreatorServer(HomeServer):
listener_config,
root_resource,
self.version_string,
-)
+),
)

logger.info("Synapse event creator now listening on port %d", port)

@@ -141,18 +143,19 @@ class EventCreatorServer(HomeServer):
listener["bind_addresses"],
listener["port"],
manhole(
-username="matrix",
-password="rabbithole",
-globals={"hs": self},
-)
+username="matrix", password="rabbithole", globals={"hs": self}
+),
)
elif listener["type"] == "metrics":
if not self.get_config().enable_metrics:
-logger.warn(("Metrics listener configured, but "
-"enable_metrics is not True!"))
+logger.warn(
+(
+"Metrics listener configured, but "
+"enable_metrics is not True!"
+)
+)
else:
-_base.listen_metrics(listener["bind_addresses"],
-listener["port"])
+_base.listen_metrics(listener["bind_addresses"], listener["port"])
else:
logger.warn("Unrecognized listener type: %s", listener["type"])

@@ -164,9 +167,7 @@ class EventCreatorServer(HomeServer):

def start(config_options):
try:
-config = HomeServerConfig.load_config(
-"Synapse event creator", config_options
-)
+config = HomeServerConfig.load_config("Synapse event creator", config_options)
except ConfigError as e:
sys.stderr.write("\n" + str(e) + "\n")
sys.exit(1)

@@ -198,6 +199,6 @@ def start(config_options):
_base.start_worker_reactor("synapse-event-creator", config)


-if __name__ == '__main__':
+if __name__ == "__main__":
with LoggingContext("main"):
start(sys.argv[1:])

@@ -86,19 +86,18 @@ class FederationReaderServer(HomeServer):
if name == "metrics":
resources[METRICS_PREFIX] = MetricsResource(RegistryProxy)
elif name == "federation":
-resources.update({
-FEDERATION_PREFIX: TransportLayerServer(self),
-})
+resources.update({FEDERATION_PREFIX: TransportLayerServer(self)})
if name == "openid" and "federation" not in res["names"]:
# Only load the openid resource separately if federation resource
# is not specified since federation resource includes openid
# resource.
-resources.update({
-FEDERATION_PREFIX: TransportLayerServer(
-self,
-servlet_groups=["openid"],
-),
-})
+resources.update(
+{
+FEDERATION_PREFIX: TransportLayerServer(
+self, servlet_groups=["openid"]
+)
+}
+)

if name in ["keys", "federation"]:
resources[SERVER_KEY_V2_PREFIX] = KeyApiV2Resource(self)

@@ -115,7 +114,7 @@ class FederationReaderServer(HomeServer):
root_resource,
self.version_string,
),
-reactor=self.get_reactor()
+reactor=self.get_reactor(),
)

logger.info("Synapse federation reader now listening on port %d", port)

@@ -129,18 +128,19 @@ class FederationReaderServer(HomeServer):
listener["bind_addresses"],
listener["port"],
manhole(
-username="matrix",
-password="rabbithole",
-globals={"hs": self},
-)
+username="matrix", password="rabbithole", globals={"hs": self}
+),
)
elif listener["type"] == "metrics":
if not self.get_config().enable_metrics:
-logger.warn(("Metrics listener configured, but "
-"enable_metrics is not True!"))
+logger.warn(
+(
+"Metrics listener configured, but "
+"enable_metrics is not True!"
+)
+)
else:
-_base.listen_metrics(listener["bind_addresses"],
-listener["port"])
+_base.listen_metrics(listener["bind_addresses"], listener["port"])
else:
logger.warn("Unrecognized listener type: %s", listener["type"])

@@ -181,6 +181,6 @@ def start(config_options):
_base.start_worker_reactor("synapse-federation-reader", config)


-if __name__ == '__main__':
+if __name__ == "__main__":
with LoggingContext("main"):
start(sys.argv[1:])

@@ -52,8 +52,13 @@ logger = logging.getLogger("synapse.app.federation_sender")


class FederationSenderSlaveStore(
-SlavedDeviceInboxStore, SlavedTransactionStore, SlavedReceiptsStore, SlavedEventStore,
-SlavedRegistrationStore, SlavedDeviceStore, SlavedPresenceStore,
+SlavedDeviceInboxStore,
+SlavedTransactionStore,
+SlavedReceiptsStore,
+SlavedEventStore,
+SlavedRegistrationStore,
+SlavedDeviceStore,
+SlavedPresenceStore,
):
def __init__(self, db_conn, hs):
super(FederationSenderSlaveStore, self).__init__(db_conn, hs)

@@ -65,10 +70,7 @@ class FederationSenderSlaveStore(
self.federation_out_pos_startup = self._get_federation_out_pos(db_conn)

def _get_federation_out_pos(self, db_conn):
-sql = (
-"SELECT stream_id FROM federation_stream_position"
-" WHERE type = ?"
-)
+sql = "SELECT stream_id FROM federation_stream_position" " WHERE type = ?"
sql = self.database_engine.convert_param_style(sql)

txn = db_conn.cursor()

@@ -103,7 +105,7 @@ class FederationSenderServer(HomeServer):
listener_config,
root_resource,
self.version_string,
-)
+),
)

logger.info("Synapse federation_sender now listening on port %d", port)

@@ -117,18 +119,19 @@ class FederationSenderServer(HomeServer):
listener["bind_addresses"],
listener["port"],
manhole(
-username="matrix",
-password="rabbithole",
-globals={"hs": self},
-)
+username="matrix", password="rabbithole", globals={"hs": self}
+),
)
elif listener["type"] == "metrics":
if not self.get_config().enable_metrics:
-logger.warn(("Metrics listener configured, but "
-"enable_metrics is not True!"))
+logger.warn(
+(
+"Metrics listener configured, but "
+"enable_metrics is not True!"
+)
+)
else:
-_base.listen_metrics(listener["bind_addresses"],
-listener["port"])
+_base.listen_metrics(listener["bind_addresses"], listener["port"])
else:
logger.warn("Unrecognized listener type: %s", listener["type"])

@@ -151,7 +154,9 @@ class FederationSenderReplicationHandler(ReplicationClientHandler):
self.send_handler.process_replication_rows(stream_name, token, rows)

def get_streams_to_replicate(self):
-args = super(FederationSenderReplicationHandler, self).get_streams_to_replicate()
+args = super(
+FederationSenderReplicationHandler, self
+).get_streams_to_replicate()
args.update(self.send_handler.stream_positions())
return args

@@ -203,6 +208,7 @@ class FederationSenderHandler(object):
"""Processes the replication stream and forwards the appropriate entries
to the federation sender.
"""
+
def __init__(self, hs, replication_client):
self.store = hs.get_datastore()
self._is_mine_id = hs.is_mine_id

@@ -241,7 +247,7 @@ class FederationSenderHandler(object):
# ... and when new receipts happen
elif stream_name == ReceiptsStream.NAME:
run_as_background_process(
-"process_receipts_for_federation", self._on_new_receipts, rows,
+"process_receipts_for_federation", self._on_new_receipts, rows
)

@defer.inlineCallbacks

@@ -278,12 +284,14 @@ class FederationSenderHandler(object):

# We ACK this token over replication so that the master can drop
# its in memory queues
-self.replication_client.send_federation_ack(self.federation_position)
+self.replication_client.send_federation_ack(
+self.federation_position
+)
self._last_ack = self.federation_position
except Exception:
logger.exception("Error updating federation stream position")


-if __name__ == '__main__':
+if __name__ == "__main__":
with LoggingContext("main"):
start(sys.argv[1:])

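One wart worth flagging in the `_get_federation_out_pos` hunk above: Black collapses the parenthesised SQL onto one line but keeps the now-pointless implicit string concatenation. The two literals could simply be merged, as this small check demonstrates (an observation about the result, not a change made by this commit):

# The reformatted line keeps an implicit string concatenation:
sql = "SELECT stream_id FROM federation_stream_position" " WHERE type = ?"

# Adjacent literals are joined at compile time, so it is equivalent to
# the single literal below, which reads better:
assert sql == "SELECT stream_id FROM federation_stream_position WHERE type = ?"
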
@@ -62,14 +62,11 @@ class PresenceStatusStubServlet(RestServlet):
# Pass through the auth headers, if any, in case the access token
# is there.
auth_headers = request.requestHeaders.getRawHeaders("Authorization", [])
-headers = {
-"Authorization": auth_headers,
-}
+headers = {"Authorization": auth_headers}

try:
result = yield self.http_client.get_json(
-self.main_uri + request.uri.decode('ascii'),
-headers=headers,
+self.main_uri + request.uri.decode("ascii"), headers=headers
)
except HttpResponseException as e:
raise e.to_synapse_error()

@@ -105,18 +102,19 @@ class KeyUploadServlet(RestServlet):
if device_id is not None:
# passing the device_id here is deprecated; however, we allow it
# for now for compatibility with older clients.
-if (requester.device_id is not None and
-device_id != requester.device_id):
-logger.warning("Client uploading keys for a different device "
-"(logged in as %s, uploading for %s)",
-requester.device_id, device_id)
+if requester.device_id is not None and device_id != requester.device_id:
+logger.warning(
+"Client uploading keys for a different device "
+"(logged in as %s, uploading for %s)",
+requester.device_id,
+device_id,
+)
else:
device_id = requester.device_id

if device_id is None:
raise SynapseError(
-400,
-"To upload keys, you must pass device_id when authenticating"
+400, "To upload keys, you must pass device_id when authenticating"
)

if body:

@@ -124,13 +122,9 @@ class KeyUploadServlet(RestServlet):
# Pass through the auth headers, if any, in case the access token
# is there.
auth_headers = request.requestHeaders.getRawHeaders(b"Authorization", [])
-headers = {
-"Authorization": auth_headers,
-}
+headers = {"Authorization": auth_headers}
result = yield self.http_client.post_json_get_json(
-self.main_uri + request.uri.decode('ascii'),
-body,
-headers=headers,
+self.main_uri + request.uri.decode("ascii"), body, headers=headers
)

defer.returnValue((200, result))

@@ -171,12 +165,14 @@ class FrontendProxyServer(HomeServer):
if not self.config.use_presence:
PresenceStatusStubServlet(self).register(resource)

-resources.update({
-"/_matrix/client/r0": resource,
-"/_matrix/client/unstable": resource,
-"/_matrix/client/v2_alpha": resource,
-"/_matrix/client/api/v1": resource,
-})
+resources.update(
+{
+"/_matrix/client/r0": resource,
+"/_matrix/client/unstable": resource,
+"/_matrix/client/v2_alpha": resource,
+"/_matrix/client/api/v1": resource,
+}
+)

root_resource = create_resource_tree(resources, NoResource())

@@ -190,7 +186,7 @@ class FrontendProxyServer(HomeServer):
root_resource,
self.version_string,
),
-reactor=self.get_reactor()
+reactor=self.get_reactor(),
)

logger.info("Synapse client reader now listening on port %d", port)

@@ -204,18 +200,19 @@ class FrontendProxyServer(HomeServer):
listener["bind_addresses"],
listener["port"],
manhole(
-username="matrix",
-password="rabbithole",
-globals={"hs": self},
-)
+username="matrix", password="rabbithole", globals={"hs": self}
+),
)
elif listener["type"] == "metrics":
if not self.get_config().enable_metrics:
-logger.warn(("Metrics listener configured, but "
-"enable_metrics is not True!"))
+logger.warn(
+(
+"Metrics listener configured, but "
+"enable_metrics is not True!"
+)
+)
else:
-_base.listen_metrics(listener["bind_addresses"],
-listener["port"])
+_base.listen_metrics(listener["bind_addresses"], listener["port"])
else:
logger.warn("Unrecognized listener type: %s", listener["type"])

@@ -227,9 +224,7 @@ class FrontendProxyServer(HomeServer):

def start(config_options):
try:
-config = HomeServerConfig.load_config(
-"Synapse frontend proxy", config_options
-)
+config = HomeServerConfig.load_config("Synapse frontend proxy", config_options)
except ConfigError as e:
sys.stderr.write("\n" + str(e) + "\n")
sys.exit(1)

@@ -258,6 +253,6 @@ def start(config_options):
_base.start_worker_reactor("synapse-frontend-proxy", config)


-if __name__ == '__main__':
+if __name__ == "__main__":
with LoggingContext("main"):
start(sys.argv[1:])

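The stub servlets above forward the caller's `Authorization` header verbatim so the main process can authenticate the request itself. A hedged sketch of that pass-through, with `requests` standing in for the Twisted-based HTTP client used above:

import requests  # stand-in for the worker's Twisted http_client

def proxy_get(main_uri, incoming_path, incoming_auth_header):
    # Forward the access token untouched; the main process does the auth.
    headers = {"Authorization": incoming_auth_header} if incoming_auth_header else {}
    resp = requests.get(main_uri + incoming_path, headers=headers)
    resp.raise_for_status()
    return resp.json()

# e.g. proxy_get("http://127.0.0.1:8008", "/_matrix/client/r0/presence/...", "Bearer <token>")
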
@@ -101,13 +101,12 @@ class SynapseHomeServer(HomeServer):
                     # Skip loading openid resource if federation is defined
                     # since federation resource will include openid
                     continue
-                resources.update(self._configure_named_resource(
-                    name, res.get("compress", False),
-                ))
+                resources.update(
+                    self._configure_named_resource(name, res.get("compress", False))
+                )

         additional_resources = listener_config.get("additional_resources", {})
-        logger.debug("Configuring additional resources: %r",
-                     additional_resources)
+        logger.debug("Configuring additional resources: %r", additional_resources)
         module_api = ModuleApi(self, self.get_auth_handler())
         for path, resmodule in additional_resources.items():
             handler_cls, config = load_module(resmodule)
@@ -174,59 +173,67 @@ class SynapseHomeServer(HomeServer):
             if compress:
                 client_resource = gz_wrap(client_resource)

-            resources.update({
-                "/_matrix/client/api/v1": client_resource,
-                "/_matrix/client/r0": client_resource,
-                "/_matrix/client/unstable": client_resource,
-                "/_matrix/client/v2_alpha": client_resource,
-                "/_matrix/client/versions": client_resource,
-                "/.well-known/matrix/client": WellKnownResource(self),
-                "/_synapse/admin": AdminRestResource(self),
-            })
+            resources.update(
+                {
+                    "/_matrix/client/api/v1": client_resource,
+                    "/_matrix/client/r0": client_resource,
+                    "/_matrix/client/unstable": client_resource,
+                    "/_matrix/client/v2_alpha": client_resource,
+                    "/_matrix/client/versions": client_resource,
+                    "/.well-known/matrix/client": WellKnownResource(self),
+                    "/_synapse/admin": AdminRestResource(self),
+                }
+            )

             if self.get_config().saml2_enabled:
                 from synapse.rest.saml2 import SAML2Resource
+
                 resources["/_matrix/saml2"] = SAML2Resource(self)

         if name == "consent":
             from synapse.rest.consent.consent_resource import ConsentResource
+
             consent_resource = ConsentResource(self)
             if compress:
                 consent_resource = gz_wrap(consent_resource)
-            resources.update({
-                "/_matrix/consent": consent_resource,
-            })
+            resources.update({"/_matrix/consent": consent_resource})

         if name == "federation":
-            resources.update({
-                FEDERATION_PREFIX: TransportLayerServer(self),
-            })
+            resources.update({FEDERATION_PREFIX: TransportLayerServer(self)})

         if name == "openid":
-            resources.update({
-                FEDERATION_PREFIX: TransportLayerServer(self, servlet_groups=["openid"]),
-            })
+            resources.update(
+                {
+                    FEDERATION_PREFIX: TransportLayerServer(
+                        self, servlet_groups=["openid"]
+                    )
+                }
+            )

         if name in ["static", "client"]:
-            resources.update({
-                STATIC_PREFIX: File(
-                    os.path.join(os.path.dirname(synapse.__file__), "static")
-                ),
-            })
+            resources.update(
+                {
+                    STATIC_PREFIX: File(
+                        os.path.join(os.path.dirname(synapse.__file__), "static")
+                    )
+                }
+            )

         if name in ["media", "federation", "client"]:
             if self.get_config().enable_media_repo:
                 media_repo = self.get_media_repository_resource()
-                resources.update({
-                    MEDIA_PREFIX: media_repo,
-                    LEGACY_MEDIA_PREFIX: media_repo,
-                    CONTENT_REPO_PREFIX: ContentRepoResource(
-                        self, self.config.uploads_path
-                    ),
-                })
+                resources.update(
+                    {
+                        MEDIA_PREFIX: media_repo,
+                        LEGACY_MEDIA_PREFIX: media_repo,
+                        CONTENT_REPO_PREFIX: ContentRepoResource(
+                            self, self.config.uploads_path
+                        ),
+                    }
+                )
             elif name == "media":
                 raise ConfigError(
-                    "'media' resource conflicts with enable_media_repo=False",
+                    "'media' resource conflicts with enable_media_repo=False"
                 )

         if name in ["keys", "federation"]:
@@ -257,18 +264,14 @@ class SynapseHomeServer(HomeServer):

         for listener in listeners:
             if listener["type"] == "http":
-                self._listening_services.extend(
-                    self._listener_http(config, listener)
-                )
+                self._listening_services.extend(self._listener_http(config, listener))
             elif listener["type"] == "manhole":
                 listen_tcp(
                     listener["bind_addresses"],
                     listener["port"],
                     manhole(
-                        username="matrix",
-                        password="rabbithole",
-                        globals={"hs": self},
-                    )
+                        username="matrix", password="rabbithole", globals={"hs": self}
+                    ),
                 )
             elif listener["type"] == "replication":
                 services = listen_tcp(
@@ -277,16 +280,17 @@ class SynapseHomeServer(HomeServer):
                     ReplicationStreamProtocolFactory(self),
                 )
                 for s in services:
-                    reactor.addSystemEventTrigger(
-                        "before", "shutdown", s.stopListening,
-                    )
+                    reactor.addSystemEventTrigger("before", "shutdown", s.stopListening)
             elif listener["type"] == "metrics":
                 if not self.get_config().enable_metrics:
-                    logger.warn(("Metrics listener configured, but "
-                                 "enable_metrics is not True!"))
+                    logger.warn(
+                        (
+                            "Metrics listener configured, but "
+                            "enable_metrics is not True!"
+                        )
+                    )
                 else:
-                    _base.listen_metrics(listener["bind_addresses"],
-                                         listener["port"])
+                    _base.listen_metrics(listener["bind_addresses"], listener["port"])
             else:
                 logger.warn("Unrecognized listener type: %s", listener["type"])

@@ -312,7 +316,7 @@ current_mau_gauge = Gauge("synapse_admin_mau:current", "Current MAU")
 max_mau_gauge = Gauge("synapse_admin_mau:max", "MAU Limit")
 registered_reserved_users_mau_gauge = Gauge(
     "synapse_admin_mau:registered_reserved_users",
-    "Registered users with reserved threepids"
+    "Registered users with reserved threepids",
 )

@@ -327,8 +331,7 @@ def setup(config_options):
     """
     try:
         config = HomeServerConfig.load_or_generate_config(
-            "Synapse Homeserver",
-            config_options,
+            "Synapse Homeserver", config_options
         )
     except ConfigError as e:
         sys.stderr.write("\n" + str(e) + "\n")
@@ -339,10 +342,7 @@ def setup(config_options):
         # generating config files and shouldn't try to continue.
         sys.exit(0)

-    synapse.config.logger.setup_logging(
-        config,
-        use_worker_options=False
-    )
+    synapse.config.logger.setup_logging(config, use_worker_options=False)

     events.USE_FROZEN_DICTS = config.use_frozen_dicts

@@ -357,7 +357,7 @@ def setup(config_options):
         database_engine=database_engine,
     )

-    logger.info("Preparing database: %s...", config.database_config['name'])
+    logger.info("Preparing database: %s...", config.database_config["name"])

     try:
         with hs.get_db_conn(run_new_connection=False) as db_conn:
@@ -375,7 +375,7 @@ def setup(config_options):
         )
         sys.exit(1)

-    logger.info("Database prepared in %s.", config.database_config['name'])
+    logger.info("Database prepared in %s.", config.database_config["name"])

     hs.setup()
     hs.setup_master()
@@ -391,9 +391,7 @@ def setup(config_options):
         acme = hs.get_acme_handler()

         # Check how long the certificate is active for.
-        cert_days_remaining = hs.config.is_disk_cert_valid(
-            allow_self_signed=False
-        )
+        cert_days_remaining = hs.config.is_disk_cert_valid(allow_self_signed=False)

         # We want to reprovision if cert_days_remaining is None (meaning no
         # certificate exists), or the days remaining number it returns
@@ -401,8 +399,8 @@ def setup(config_options):
         provision = False

         if (
-            cert_days_remaining is None or
-            cert_days_remaining < hs.config.acme_reprovision_threshold
+            cert_days_remaining is None
+            or cert_days_remaining < hs.config.acme_reprovision_threshold
         ):
             provision = True

@@ -433,10 +431,7 @@ def setup(config_options):
             yield do_acme()

             # Check if it needs to be reprovisioned every day.
-            hs.get_clock().looping_call(
-                reprovision_acme,
-                24 * 60 * 60 * 1000
-            )
+            hs.get_clock().looping_call(reprovision_acme, 24 * 60 * 60 * 1000)

         _base.start(hs, config.listeners)

@@ -463,6 +458,7 @@ class SynapseService(service.Service):
     A twisted Service class that will start synapse. Used to run synapse
     via twistd and a .tac.
     """
+
     def __init__(self, config):
         self.config = config

@@ -479,6 +475,7 @@ class SynapseService(service.Service):
 def run(hs):
     PROFILE_SYNAPSE = False
     if PROFILE_SYNAPSE:
+
         def profile(func):
             from cProfile import Profile
             from threading import current_thread
@@ -489,13 +486,14 @@ def run(hs):
             func(*args, **kargs)
             profile.disable()
             ident = current_thread().ident
-            profile.dump_stats("/tmp/%s.%s.%i.pstat" % (
-                hs.hostname, func.__name__, ident
-            ))
+            profile.dump_stats(
+                "/tmp/%s.%s.%i.pstat" % (hs.hostname, func.__name__, ident)
+            )

             return profiled

         from twisted.python.threadpool import ThreadPool
+
         ThreadPool._worker = profile(ThreadPool._worker)
         reactor.run = profile(reactor.run)

@@ -541,7 +539,9 @@ def run(hs):

         stats["daily_active_users"] = yield hs.get_datastore().count_daily_users()
         stats["monthly_active_users"] = yield hs.get_datastore().count_monthly_users()
-        stats["daily_active_rooms"] = yield hs.get_datastore().count_daily_active_rooms()
+        stats[
+            "daily_active_rooms"
+        ] = yield hs.get_datastore().count_daily_active_rooms()
         stats["daily_messages"] = yield hs.get_datastore().count_daily_messages()

         r30_results = yield hs.get_datastore().count_r30_users()
@@ -565,8 +565,7 @@ def run(hs):
         logger.info("Reporting stats to matrix.org: %s" % (stats,))
         try:
             yield hs.get_simple_http_client().put_json(
-                "https://matrix.org/report-usage-stats/push",
-                stats
+                "https://matrix.org/report-usage-stats/push", stats
             )
         except Exception as e:
             logger.warn("Error reporting stats: %s", e)
@@ -581,14 +580,11 @@ def run(hs):
             logger.info("report_stats can use psutil")
             stats_process.append(process)
         except (AttributeError):
-            logger.warning(
-                "Unable to read memory/cpu stats. Disabling reporting."
-            )
+            logger.warning("Unable to read memory/cpu stats. Disabling reporting.")

     def generate_user_daily_visit_stats():
         return run_as_background_process(
-            "generate_user_daily_visits",
-            hs.get_datastore().generate_user_daily_visits,
+            "generate_user_daily_visits", hs.get_datastore().generate_user_daily_visits
         )

     # Rather than update on per session basis, batch up the requests.
@@ -599,9 +595,9 @@ def run(hs):
     # monthly active user limiting functionality
     def reap_monthly_active_users():
         return run_as_background_process(
-            "reap_monthly_active_users",
-            hs.get_datastore().reap_monthly_active_users,
+            "reap_monthly_active_users", hs.get_datastore().reap_monthly_active_users
         )

     clock.looping_call(reap_monthly_active_users, 1000 * 60 * 60)
     reap_monthly_active_users()
@@ -619,8 +615,7 @@ def run(hs):

     def start_generate_monthly_active_users():
         return run_as_background_process(
-            "generate_monthly_active_users",
-            generate_monthly_active_users,
+            "generate_monthly_active_users", generate_monthly_active_users
         )

     start_generate_monthly_active_users()
@@ -660,5 +655,5 @@ def main():
         run(hs)


-if __name__ == '__main__':
+if __name__ == "__main__":
     main()

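The pattern in the hunks above repeats through the rest of the commit: string quotes are normalised to double quotes, calls that fit within the line-length limit are collapsed onto one line, and calls that do not fit are exploded with one element per line and one closing bracket per level. A minimal sketch of the same transformation, assuming black is installed and that black.format_str and black.FileMode behave as in the 19.x releases current when this commit landed; the snippet fed to it is illustrative, not taken from this diff:

import black

# An example in the pre-Black style used by Synapse before this commit.
src = (
    "resources.update(self._configure_named_resource(\n"
    "    name, res.get('compress', False),\n"
    "))\n"
)

# format_str applies the same rules as running `black` on a file: double
# quotes, an 88-column default line length, and one closing bracket per
# level when a call has to be split.
print(black.format_str(src, mode=black.FileMode()))
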
@@ -72,13 +72,15 @@ class MediaRepositoryServer(HomeServer):
                     resources[METRICS_PREFIX] = MetricsResource(RegistryProxy)
                 elif name == "media":
                     media_repo = self.get_media_repository_resource()
-                    resources.update({
-                        MEDIA_PREFIX: media_repo,
-                        LEGACY_MEDIA_PREFIX: media_repo,
-                        CONTENT_REPO_PREFIX: ContentRepoResource(
-                            self, self.config.uploads_path
-                        ),
-                    })
+                    resources.update(
+                        {
+                            MEDIA_PREFIX: media_repo,
+                            LEGACY_MEDIA_PREFIX: media_repo,
+                            CONTENT_REPO_PREFIX: ContentRepoResource(
+                                self, self.config.uploads_path
+                            ),
+                        }
+                    )

         root_resource = create_resource_tree(resources, NoResource())

@@ -91,7 +93,7 @@ class MediaRepositoryServer(HomeServer):
                 listener_config,
                 root_resource,
                 self.version_string,
-            )
+            ),
         )

         logger.info("Synapse media repository now listening on port %d", port)
@@ -105,18 +107,19 @@ class MediaRepositoryServer(HomeServer):
                     listener["bind_addresses"],
                     listener["port"],
                     manhole(
-                        username="matrix",
-                        password="rabbithole",
-                        globals={"hs": self},
-                    )
+                        username="matrix", password="rabbithole", globals={"hs": self}
+                    ),
                 )
             elif listener["type"] == "metrics":
                 if not self.get_config().enable_metrics:
-                    logger.warn(("Metrics listener configured, but "
-                                 "enable_metrics is not True!"))
+                    logger.warn(
+                        (
+                            "Metrics listener configured, but "
+                            "enable_metrics is not True!"
+                        )
+                    )
                 else:
-                    _base.listen_metrics(listener["bind_addresses"],
-                                         listener["port"])
+                    _base.listen_metrics(listener["bind_addresses"], listener["port"])
             else:
                 logger.warn("Unrecognized listener type: %s", listener["type"])

@@ -164,6 +167,6 @@ def start(config_options):
     _base.start_worker_reactor("synapse-media-repository", config)


-if __name__ == '__main__':
+if __name__ == "__main__":
     with LoggingContext("main"):
         start(sys.argv[1:])

@@ -46,36 +46,27 @@ logger = logging.getLogger("synapse.app.pusher")


 class PusherSlaveStore(
-    SlavedEventStore, SlavedPusherStore, SlavedReceiptsStore,
-    SlavedAccountDataStore
+    SlavedEventStore, SlavedPusherStore, SlavedReceiptsStore, SlavedAccountDataStore
 ):
-    update_pusher_last_stream_ordering_and_success = (
-        __func__(DataStore.update_pusher_last_stream_ordering_and_success)
+    update_pusher_last_stream_ordering_and_success = __func__(
+        DataStore.update_pusher_last_stream_ordering_and_success
     )

-    update_pusher_failing_since = (
-        __func__(DataStore.update_pusher_failing_since)
+    update_pusher_failing_since = __func__(DataStore.update_pusher_failing_since)
+
+    update_pusher_last_stream_ordering = __func__(
+        DataStore.update_pusher_last_stream_ordering
     )

-    update_pusher_last_stream_ordering = (
-        __func__(DataStore.update_pusher_last_stream_ordering)
-    )
+    get_throttle_params_by_room = __func__(DataStore.get_throttle_params_by_room)

-    get_throttle_params_by_room = (
-        __func__(DataStore.get_throttle_params_by_room)
-    )
+    set_throttle_params = __func__(DataStore.set_throttle_params)

-    set_throttle_params = (
-        __func__(DataStore.set_throttle_params)
-    )
+    get_time_of_last_push_action_before = __func__(
+        DataStore.get_time_of_last_push_action_before
+    )

-    get_time_of_last_push_action_before = (
-        __func__(DataStore.get_time_of_last_push_action_before)
-    )
-
-    get_profile_displayname = (
-        __func__(DataStore.get_profile_displayname)
-    )
+    get_profile_displayname = __func__(DataStore.get_profile_displayname)


 class PusherServer(HomeServer):
@@ -105,7 +96,7 @@ class PusherServer(HomeServer):
                 listener_config,
                 root_resource,
                 self.version_string,
-            )
+            ),
         )

         logger.info("Synapse pusher now listening on port %d", port)
@@ -119,18 +110,19 @@ class PusherServer(HomeServer):
                     listener["bind_addresses"],
                     listener["port"],
                     manhole(
-                        username="matrix",
-                        password="rabbithole",
-                        globals={"hs": self},
-                    )
+                        username="matrix", password="rabbithole", globals={"hs": self}
+                    ),
                 )
             elif listener["type"] == "metrics":
                 if not self.get_config().enable_metrics:
-                    logger.warn(("Metrics listener configured, but "
-                                 "enable_metrics is not True!"))
+                    logger.warn(
+                        (
+                            "Metrics listener configured, but "
+                            "enable_metrics is not True!"
+                        )
+                    )
                 else:
-                    _base.listen_metrics(listener["bind_addresses"],
-                                         listener["port"])
+                    _base.listen_metrics(listener["bind_addresses"], listener["port"])
             else:
                 logger.warn("Unrecognized listener type: %s", listener["type"])

@@ -161,9 +153,7 @@ class PusherReplicationHandler(ReplicationClientHandler):
             else:
                 yield self.start_pusher(row.user_id, row.app_id, row.pushkey)
         elif stream_name == "events":
-            yield self.pusher_pool.on_new_notifications(
-                token, token,
-            )
+            yield self.pusher_pool.on_new_notifications(token, token)
         elif stream_name == "receipts":
             yield self.pusher_pool.on_new_receipts(
                 token, token, set(row.room_id for row in rows)
@@ -188,9 +178,7 @@ class PusherReplicationHandler(ReplicationClientHandler):

 def start(config_options):
     try:
-        config = HomeServerConfig.load_config(
-            "Synapse pusher", config_options
-        )
+        config = HomeServerConfig.load_config("Synapse pusher", config_options)
     except ConfigError as e:
         sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)
@@ -234,6 +222,6 @@ def start(config_options):
     _base.start_worker_reactor("synapse-pusher", config)


-if __name__ == '__main__':
+if __name__ == "__main__":
     with LoggingContext("main"):
         ps = start(sys.argv[1:])

@@ -98,10 +98,7 @@ class SynchrotronPresence(object):
         self.notifier = hs.get_notifier()

         active_presence = self.store.take_presence_startup_info()
-        self.user_to_current_state = {
-            state.user_id: state
-            for state in active_presence
-        }
+        self.user_to_current_state = {state.user_id: state for state in active_presence}

         # user_id -> last_sync_ms. Lists the users that have stopped syncing
         # but we haven't notified the master of that yet
@@ -196,17 +193,26 @@ class SynchrotronPresence(object):
         room_ids_to_states, users_to_states = parties

         self.notifier.on_new_event(
-            "presence_key", stream_id, rooms=room_ids_to_states.keys(),
-            users=users_to_states.keys()
+            "presence_key",
+            stream_id,
+            rooms=room_ids_to_states.keys(),
+            users=users_to_states.keys(),
         )

     @defer.inlineCallbacks
     def process_replication_rows(self, token, rows):
-        states = [UserPresenceState(
-            row.user_id, row.state, row.last_active_ts,
-            row.last_federation_update_ts, row.last_user_sync_ts, row.status_msg,
-            row.currently_active
-        ) for row in rows]
+        states = [
+            UserPresenceState(
+                row.user_id,
+                row.state,
+                row.last_active_ts,
+                row.last_federation_update_ts,
+                row.last_user_sync_ts,
+                row.status_msg,
+                row.currently_active,
+            )
+            for row in rows
+        ]

         for state in states:
             self.user_to_current_state[state.user_id] = state
@@ -217,7 +223,8 @@ class SynchrotronPresence(object):
     def get_currently_syncing_users(self):
         if self.hs.config.use_presence:
             return [
-                user_id for user_id, count in iteritems(self.user_to_num_current_syncs)
+                user_id
+                for user_id, count in iteritems(self.user_to_num_current_syncs)
                 if count > 0
             ]
         else:
@@ -281,12 +288,14 @@ class SynchrotronServer(HomeServer):
                     events.register_servlets(self, resource)
                     InitialSyncRestServlet(self).register(resource)
                     RoomInitialSyncRestServlet(self).register(resource)
-                    resources.update({
-                        "/_matrix/client/r0": resource,
-                        "/_matrix/client/unstable": resource,
-                        "/_matrix/client/v2_alpha": resource,
-                        "/_matrix/client/api/v1": resource,
-                    })
+                    resources.update(
+                        {
+                            "/_matrix/client/r0": resource,
+                            "/_matrix/client/unstable": resource,
+                            "/_matrix/client/v2_alpha": resource,
+                            "/_matrix/client/api/v1": resource,
+                        }
+                    )

         root_resource = create_resource_tree(resources, NoResource())

@@ -299,7 +308,7 @@ class SynchrotronServer(HomeServer):
                 listener_config,
                 root_resource,
                 self.version_string,
-            )
+            ),
         )

         logger.info("Synapse synchrotron now listening on port %d", port)
@@ -313,18 +322,19 @@ class SynchrotronServer(HomeServer):
                     listener["bind_addresses"],
                     listener["port"],
                     manhole(
-                        username="matrix",
-                        password="rabbithole",
-                        globals={"hs": self},
-                    )
+                        username="matrix", password="rabbithole", globals={"hs": self}
+                    ),
                 )
             elif listener["type"] == "metrics":
                 if not self.get_config().enable_metrics:
-                    logger.warn(("Metrics listener configured, but "
-                                 "enable_metrics is not True!"))
+                    logger.warn(
+                        (
+                            "Metrics listener configured, but "
+                            "enable_metrics is not True!"
+                        )
+                    )
                 else:
-                    _base.listen_metrics(listener["bind_addresses"],
-                                         listener["port"])
+                    _base.listen_metrics(listener["bind_addresses"], listener["port"])
             else:
                 logger.warn("Unrecognized listener type: %s", listener["type"])

@@ -382,40 +392,36 @@ class SyncReplicationHandler(ReplicationClientHandler):
                 )
             elif stream_name == "push_rules":
                 self.notifier.on_new_event(
-                    "push_rules_key", token, users=[row.user_id for row in rows],
+                    "push_rules_key", token, users=[row.user_id for row in rows]
                 )
-            elif stream_name in ("account_data", "tag_account_data",):
+            elif stream_name in ("account_data", "tag_account_data"):
                 self.notifier.on_new_event(
-                    "account_data_key", token, users=[row.user_id for row in rows],
+                    "account_data_key", token, users=[row.user_id for row in rows]
                 )
             elif stream_name == "receipts":
                 self.notifier.on_new_event(
-                    "receipt_key", token, rooms=[row.room_id for row in rows],
+                    "receipt_key", token, rooms=[row.room_id for row in rows]
                 )
             elif stream_name == "typing":
                 self.typing_handler.process_replication_rows(token, rows)
                 self.notifier.on_new_event(
-                    "typing_key", token, rooms=[row.room_id for row in rows],
+                    "typing_key", token, rooms=[row.room_id for row in rows]
                 )
             elif stream_name == "to_device":
                 entities = [row.entity for row in rows if row.entity.startswith("@")]
                 if entities:
-                    self.notifier.on_new_event(
-                        "to_device_key", token, users=entities,
-                    )
+                    self.notifier.on_new_event("to_device_key", token, users=entities)
             elif stream_name == "device_lists":
                 all_room_ids = set()
                 for row in rows:
                     room_ids = yield self.store.get_rooms_for_user(row.user_id)
                     all_room_ids.update(room_ids)
-                self.notifier.on_new_event(
-                    "device_list_key", token, rooms=all_room_ids,
-                )
+                self.notifier.on_new_event("device_list_key", token, rooms=all_room_ids)
             elif stream_name == "presence":
                 yield self.presence_handler.process_replication_rows(token, rows)
             elif stream_name == "receipts":
                 self.notifier.on_new_event(
-                    "groups_key", token, users=[row.user_id for row in rows],
+                    "groups_key", token, users=[row.user_id for row in rows]
                 )
         except Exception:
             logger.exception("Error processing replication")
@@ -423,9 +429,7 @@ class SyncReplicationHandler(ReplicationClientHandler):

 def start(config_options):
     try:
-        config = HomeServerConfig.load_config(
-            "Synapse synchrotron", config_options
-        )
+        config = HomeServerConfig.load_config("Synapse synchrotron", config_options)
     except ConfigError as e:
         sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)
@@ -453,6 +457,6 @@ def start(config_options):
     _base.start_worker_reactor("synapse-synchrotron", config)


-if __name__ == '__main__':
+if __name__ == "__main__":
     with LoggingContext("main"):
         start(sys.argv[1:])

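Black runs with an AST safety check by default (the --safe mode), so a pure reformatting commit like this one cannot change behaviour. The same check can be reproduced by hand on the comprehension rewritten in the process_replication_rows hunk above; both sources below are copied from that hunk, and equal ast.dump output means equal semantics, since the parser discards layout and the trailing comma:

import ast

old_src = """states = [UserPresenceState(
    row.user_id, row.state, row.last_active_ts,
    row.last_federation_update_ts, row.last_user_sync_ts, row.status_msg,
    row.currently_active
) for row in rows]"""

new_src = """states = [
    UserPresenceState(
        row.user_id,
        row.state,
        row.last_active_ts,
        row.last_federation_update_ts,
        row.last_user_sync_ts,
        row.status_msg,
        row.currently_active,
    )
    for row in rows
]"""

# ast.dump normalises away formatting, so equal dumps mean equal semantics.
assert ast.dump(ast.parse(old_src)) == ast.dump(ast.parse(new_src))
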
@@ -66,14 +66,16 @@ class UserDirectorySlaveStore(

         events_max = self._stream_id_gen.get_current_token()
         curr_state_delta_prefill, min_curr_state_delta_id = self._get_cache_dict(
-            db_conn, "current_state_delta_stream",
+            db_conn,
+            "current_state_delta_stream",
             entity_column="room_id",
             stream_column="stream_id",
             max_value=events_max,  # As we share the stream id with events token
             limit=1000,
         )
         self._curr_state_delta_stream_cache = StreamChangeCache(
-            "_curr_state_delta_stream_cache", min_curr_state_delta_id,
+            "_curr_state_delta_stream_cache",
+            min_curr_state_delta_id,
             prefilled_cache=curr_state_delta_prefill,
         )

@@ -110,12 +112,14 @@ class UserDirectoryServer(HomeServer):
                 elif name == "client":
                     resource = JsonResource(self, canonical_json=False)
                     user_directory.register_servlets(self, resource)
-                    resources.update({
-                        "/_matrix/client/r0": resource,
-                        "/_matrix/client/unstable": resource,
-                        "/_matrix/client/v2_alpha": resource,
-                        "/_matrix/client/api/v1": resource,
-                    })
+                    resources.update(
+                        {
+                            "/_matrix/client/r0": resource,
+                            "/_matrix/client/unstable": resource,
+                            "/_matrix/client/v2_alpha": resource,
+                            "/_matrix/client/api/v1": resource,
+                        }
+                    )

         root_resource = create_resource_tree(resources, NoResource())

@@ -128,7 +132,7 @@ class UserDirectoryServer(HomeServer):
                 listener_config,
                 root_resource,
                 self.version_string,
-            )
+            ),
         )

         logger.info("Synapse user_dir now listening on port %d", port)
@@ -142,18 +146,19 @@ class UserDirectoryServer(HomeServer):
                     listener["bind_addresses"],
                     listener["port"],
                     manhole(
-                        username="matrix",
-                        password="rabbithole",
-                        globals={"hs": self},
-                    )
+                        username="matrix", password="rabbithole", globals={"hs": self}
+                    ),
                 )
             elif listener["type"] == "metrics":
                 if not self.get_config().enable_metrics:
-                    logger.warn(("Metrics listener configured, but "
-                                 "enable_metrics is not True!"))
+                    logger.warn(
+                        (
+                            "Metrics listener configured, but "
+                            "enable_metrics is not True!"
+                        )
+                    )
                 else:
-                    _base.listen_metrics(listener["bind_addresses"],
-                                         listener["port"])
+                    _base.listen_metrics(listener["bind_addresses"], listener["port"])
             else:
                 logger.warn("Unrecognized listener type: %s", listener["type"])

@@ -186,9 +191,7 @@ class UserDirectoryReplicationHandler(ReplicationClientHandler):

 def start(config_options):
     try:
-        config = HomeServerConfig.load_config(
-            "Synapse user directory", config_options
-        )
+        config = HomeServerConfig.load_config("Synapse user directory", config_options)
     except ConfigError as e:
         sys.stderr.write("\n" + str(e) + "\n")
         sys.exit(1)
@@ -227,6 +230,6 @@ def start(config_options):
     _base.start_worker_reactor("synapse-user-dir", config)


-if __name__ == '__main__':
+if __name__ == "__main__":
     with LoggingContext("main"):
         start(sys.argv[1:])

@@ -48,9 +48,7 @@ class AppServiceTransaction(object):
             A Deferred which resolves to True if the transaction was sent.
         """
         return as_api.push_bulk(
-            service=self.service,
-            events=self.events,
-            txn_id=self.id
+            service=self.service, events=self.events, txn_id=self.id
         )

     def complete(self, store):
@@ -64,10 +62,7 @@ class AppServiceTransaction(object):
         Returns:
             A Deferred which resolves to True if the transaction was completed.
         """
-        return store.complete_appservice_txn(
-            service=self.service,
-            txn_id=self.id
-        )
+        return store.complete_appservice_txn(service=self.service, txn_id=self.id)


 class ApplicationService(object):
@@ -76,6 +71,7 @@ class ApplicationService(object):

     Provides methods to check if this service is "interested" in events.
     """
+
     NS_USERS = "users"
     NS_ALIASES = "aliases"
     NS_ROOMS = "rooms"
@@ -84,9 +80,19 @@ class ApplicationService(object):
     # values.
     NS_LIST = [NS_USERS, NS_ALIASES, NS_ROOMS]

-    def __init__(self, token, hostname, url=None, namespaces=None, hs_token=None,
-                 sender=None, id=None, protocols=None, rate_limited=True,
-                 ip_range_whitelist=None):
+    def __init__(
+        self,
+        token,
+        hostname,
+        url=None,
+        namespaces=None,
+        hs_token=None,
+        sender=None,
+        id=None,
+        protocols=None,
+        rate_limited=True,
+        ip_range_whitelist=None,
+    ):
         self.token = token
         self.url = url
         self.hs_token = hs_token
@@ -128,9 +134,7 @@ class ApplicationService(object):
                 if not isinstance(regex_obj, dict):
                     raise ValueError("Expected dict regex for ns '%s'" % ns)
                 if not isinstance(regex_obj.get("exclusive"), bool):
-                    raise ValueError(
-                        "Expected bool for 'exclusive' in ns '%s'" % ns
-                    )
+                    raise ValueError("Expected bool for 'exclusive' in ns '%s'" % ns)
                 group_id = regex_obj.get("group_id")
                 if group_id:
                     if not isinstance(group_id, str):
@@ -153,9 +157,7 @@ class ApplicationService(object):
                 if isinstance(regex, string_types):
                     regex_obj["regex"] = re.compile(regex)  # Pre-compile regex
                 else:
-                    raise ValueError(
-                        "Expected string for 'regex' in ns '%s'" % ns
-                    )
+                    raise ValueError("Expected string for 'regex' in ns '%s'" % ns)
         return namespaces

     def _matches_regex(self, test_string, namespace_key):
@@ -178,8 +180,9 @@ class ApplicationService(object):
         if self.is_interested_in_user(event.sender):
             defer.returnValue(True)
         # also check m.room.member state key
-        if (event.type == EventTypes.Member and
-                self.is_interested_in_user(event.state_key)):
+        if event.type == EventTypes.Member and self.is_interested_in_user(
+            event.state_key
+        ):
             defer.returnValue(True)

         if not store:

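The ApplicationService.__init__ hunk above shows Black's rule for signatures: when a def does not fit on one line, every parameter moves to its own line and the last one gains a trailing comma. A before/after sketch with a made-up function name; only the layout differs, the callable interface is identical:

# Pre-Black layout: parameters packed and hand-aligned.
def make_service_old(token, hostname, url=None, namespaces=None, hs_token=None,
                     sender=None, id=None, protocols=None, rate_limited=True,
                     ip_range_whitelist=None):
    return (token, hostname, url)


# Black layout: one parameter per line, trailing comma, dedented parens.
def make_service_new(
    token,
    hostname,
    url=None,
    namespaces=None,
    hs_token=None,
    sender=None,
    id=None,
    protocols=None,
    rate_limited=True,
    ip_range_whitelist=None,
):
    return (token, hostname, url)


# Both spellings define the same callable interface.
assert make_service_old("t", "hs") == make_service_new("t", "hs")
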
@@ -32,19 +32,17 @@ logger = logging.getLogger(__name__)
 sent_transactions_counter = Counter(
     "synapse_appservice_api_sent_transactions",
     "Number of /transactions/ requests sent",
-    ["service"]
+    ["service"],
 )

 failed_transactions_counter = Counter(
     "synapse_appservice_api_failed_transactions",
     "Number of /transactions/ requests that failed to send",
-    ["service"]
+    ["service"],
 )

 sent_events_counter = Counter(
-    "synapse_appservice_api_sent_events",
-    "Number of events sent to the AS",
-    ["service"]
+    "synapse_appservice_api_sent_events", "Number of events sent to the AS", ["service"]
 )

 HOUR_IN_MS = 60 * 60 * 1000
@@ -92,8 +90,9 @@ class ApplicationServiceApi(SimpleHttpClient):
         super(ApplicationServiceApi, self).__init__(hs)
         self.clock = hs.get_clock()

-        self.protocol_meta_cache = ResponseCache(hs, "as_protocol_meta",
-                                                 timeout_ms=HOUR_IN_MS)
+        self.protocol_meta_cache = ResponseCache(
+            hs, "as_protocol_meta", timeout_ms=HOUR_IN_MS
+        )

     @defer.inlineCallbacks
     def query_user(self, service, user_id):
@@ -102,9 +101,7 @@ class ApplicationServiceApi(SimpleHttpClient):
         uri = service.url + ("/users/%s" % urllib.parse.quote(user_id))
         response = None
         try:
-            response = yield self.get_json(uri, {
-                "access_token": service.hs_token
-            })
+            response = yield self.get_json(uri, {"access_token": service.hs_token})
             if response is not None:  # just an empty json object
                 defer.returnValue(True)
         except CodeMessageException as e:
@@ -123,9 +120,7 @@ class ApplicationServiceApi(SimpleHttpClient):
         uri = service.url + ("/rooms/%s" % urllib.parse.quote(alias))
         response = None
         try:
-            response = yield self.get_json(uri, {
-                "access_token": service.hs_token
-            })
+            response = yield self.get_json(uri, {"access_token": service.hs_token})
             if response is not None:  # just an empty json object
                 defer.returnValue(True)
         except CodeMessageException as e:
@@ -144,9 +139,7 @@ class ApplicationServiceApi(SimpleHttpClient):
         elif kind == ThirdPartyEntityKind.LOCATION:
             required_field = "alias"
         else:
-            raise ValueError(
-                "Unrecognised 'kind' argument %r to query_3pe()", kind
-            )
+            raise ValueError("Unrecognised 'kind' argument %r to query_3pe()", kind)
         if service.url is None:
             defer.returnValue([])

@@ -154,14 +147,13 @@ class ApplicationServiceApi(SimpleHttpClient):
             service.url,
             APP_SERVICE_PREFIX,
             kind,
-            urllib.parse.quote(protocol)
+            urllib.parse.quote(protocol),
         )
         try:
             response = yield self.get_json(uri, fields)
             if not isinstance(response, list):
                 logger.warning(
-                    "query_3pe to %s returned an invalid response %r",
-                    uri, response
+                    "query_3pe to %s returned an invalid response %r", uri, response
                 )
                 defer.returnValue([])

@@ -171,8 +163,7 @@ class ApplicationServiceApi(SimpleHttpClient):
                     ret.append(r)
                 else:
                     logger.warning(
-                        "query_3pe to %s returned an invalid result %r",
-                        uri, r
+                        "query_3pe to %s returned an invalid result %r", uri, r
                     )

             defer.returnValue(ret)
@@ -189,27 +180,27 @@ class ApplicationServiceApi(SimpleHttpClient):
         uri = "%s%s/thirdparty/protocol/%s" % (
             service.url,
             APP_SERVICE_PREFIX,
-            urllib.parse.quote(protocol)
+            urllib.parse.quote(protocol),
         )
         try:
             info = yield self.get_json(uri, {})

             if not _is_valid_3pe_metadata(info):
-                logger.warning("query_3pe_protocol to %s did not return a"
-                               " valid result", uri)
+                logger.warning(
+                    "query_3pe_protocol to %s did not return a" " valid result", uri
+                )
                 defer.returnValue(None)

             for instance in info.get("instances", []):
                 network_id = instance.get("network_id", None)
                 if network_id is not None:
                     instance["instance_id"] = ThirdPartyInstanceID(
-                        service.id, network_id,
+                        service.id, network_id
                     ).to_string()

             defer.returnValue(info)
         except Exception as ex:
-            logger.warning("query_3pe_protocol to %s threw exception %s",
-                           uri, ex)
+            logger.warning("query_3pe_protocol to %s threw exception %s", uri, ex)
             defer.returnValue(None)

         key = (service.id, protocol)
@@ -223,22 +214,19 @@ class ApplicationServiceApi(SimpleHttpClient):
         events = self._serialize(events)

         if txn_id is None:
-            logger.warning("push_bulk: Missing txn ID sending events to %s",
-                           service.url)
+            logger.warning(
+                "push_bulk: Missing txn ID sending events to %s", service.url
+            )
             txn_id = str(0)
         txn_id = str(txn_id)

-        uri = service.url + ("/transactions/%s" %
-                             urllib.parse.quote(txn_id))
+        uri = service.url + ("/transactions/%s" % urllib.parse.quote(txn_id))
         try:
             yield self.put_json(
                 uri=uri,
-                json_body={
-                    "events": events
-                },
-                args={
-                    "access_token": service.hs_token
-                })
+                json_body={"events": events},
+                args={"access_token": service.hs_token},
+            )
             sent_transactions_counter.labels(service.id).inc()
             sent_events_counter.labels(service.id).inc(len(events))
             defer.returnValue(True)
@@ -252,6 +240,4 @@ class ApplicationServiceApi(SimpleHttpClient):

     def _serialize(self, events):
         time_now = self.clock.time_msec()
-        return [
-            serialize_event(e, time_now, as_client_event=True) for e in events
-        ]
+        return [serialize_event(e, time_now, as_client_event=True) for e in events]

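One artifact worth noting in the query_3pe_protocol hunk above: Black joins wrapped lines but never merges adjacent string literals, so the implicit concatenation left over from the old manual wrapping survives on a single line as "...did not return a" " valid result". The two spellings evaluate to the same value, as this self-contained check shows:

old_style = ("query_3pe_protocol to %s did not return a"
             " valid result")
new_style = "query_3pe_protocol to %s did not return a" " valid result"

# Implicit concatenation of adjacent literals happens at compile time,
# so both expressions produce one identical string.
assert old_style == new_style == (
    "query_3pe_protocol to %s did not return a valid result"
)
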
@@ -112,15 +112,14 @@ class _ServiceQueuer(object):
             return

         run_as_background_process(
-            "as-sender-%s" % (service.id, ),
-            self._send_request, service,
+            "as-sender-%s" % (service.id,), self._send_request, service
         )

     @defer.inlineCallbacks
     def _send_request(self, service):
         # sanity-check: we shouldn't get here if this service already has a sender
         # running.
-        assert(service.id not in self.requests_in_flight)
+        assert service.id not in self.requests_in_flight

         self.requests_in_flight.add(service.id)
         try:
@@ -137,7 +136,6 @@ class _ServiceQueuer(object):


 class _TransactionController(object):
-
     def __init__(self, clock, store, as_api, recoverer_fn):
         self.clock = clock
         self.store = store
@@ -149,10 +147,7 @@ class _TransactionController(object):
     @defer.inlineCallbacks
     def send(self, service, events):
         try:
-            txn = yield self.store.create_appservice_txn(
-                service=service,
-                events=events
-            )
+            txn = yield self.store.create_appservice_txn(service=service, events=events)
             service_is_up = yield self._is_service_up(service)
             if service_is_up:
                 sent = yield txn.send(self.as_api)
@@ -167,12 +162,12 @@ class _TransactionController(object):
     @defer.inlineCallbacks
     def on_recovered(self, recoverer):
         self.recoverers.remove(recoverer)
-        logger.info("Successfully recovered application service AS ID %s",
-                    recoverer.service.id)
+        logger.info(
+            "Successfully recovered application service AS ID %s", recoverer.service.id
+        )
         logger.info("Remaining active recoverers: %s", len(self.recoverers))
         yield self.store.set_appservice_state(
-            recoverer.service,
-            ApplicationServiceState.UP
+            recoverer.service, ApplicationServiceState.UP
         )

     def add_recoverers(self, recoverers):
@@ -184,13 +179,10 @@ class _TransactionController(object):
     @defer.inlineCallbacks
     def _start_recoverer(self, service):
         try:
-            yield self.store.set_appservice_state(
-                service,
-                ApplicationServiceState.DOWN
-            )
+            yield self.store.set_appservice_state(service, ApplicationServiceState.DOWN)
             logger.info(
                 "Application service falling behind. Starting recoverer. AS ID %s",
-                service.id
+                service.id,
             )
             recoverer = self.recoverer_fn(service, self.on_recovered)
             self.add_recoverers([recoverer])
@@ -205,19 +197,16 @@ class _TransactionController(object):


 class _Recoverer(object):
-
     @staticmethod
     @defer.inlineCallbacks
     def start(clock, store, as_api, callback):
-        services = yield store.get_appservices_by_state(
-            ApplicationServiceState.DOWN
-        )
-        recoverers = [
-            _Recoverer(clock, store, as_api, s, callback) for s in services
-        ]
+        services = yield store.get_appservices_by_state(ApplicationServiceState.DOWN)
+        recoverers = [_Recoverer(clock, store, as_api, s, callback) for s in services]
         for r in recoverers:
-            logger.info("Starting recoverer for AS ID %s which was marked as "
-                        "DOWN", r.service.id)
+            logger.info(
+                "Starting recoverer for AS ID %s which was marked as " "DOWN",
+                r.service.id,
+            )
             r.recover()
         defer.returnValue(recoverers)

@@ -232,9 +221,9 @@ class _Recoverer(object):
     def recover(self):
         def _retry():
             run_as_background_process(
-                "as-recoverer-%s" % (self.service.id,),
-                self.retry,
+                "as-recoverer-%s" % (self.service.id,), self.retry
             )
+
         self.clock.call_later((2 ** self.backoff_counter), _retry)

     def _backoff(self):
@@ -248,8 +237,9 @@ class _Recoverer(object):
         try:
             txn = yield self.store.get_oldest_unsent_txn(self.service)
             if txn:
-                logger.info("Retrying transaction %s for AS ID %s",
-                            txn.id, txn.service.id)
+                logger.info(
+                    "Retrying transaction %s for AS ID %s", txn.id, txn.service.id
+                )
                 sent = yield txn.send(self.as_api)
                 if sent:
                     yield txn.complete(self.store)

@@ -284,8 +284,8 @@ class Config(object):
         if not config_files:
             config_parser.error(
                 "Must supply a config file.\nA config file can be automatically"
-                " generated using \"--generate-config -H SERVER_NAME"
-                " -c CONFIG-FILE\""
+                ' generated using "--generate-config -H SERVER_NAME'
+                ' -c CONFIG-FILE"'
             )
         (config_path,) = config_files
         if not cls.path_exists(config_path):
@@ -313,9 +313,7 @@ class Config(object):
             if not cls.path_exists(config_dir_path):
                 os.makedirs(config_dir_path)
             with open(config_path, "w") as config_file:
-                config_file.write(
-                    "# vim:ft=yaml\n\n"
-                )
+                config_file.write("# vim:ft=yaml\n\n")
                 config_file.write(config_str)

             config = yaml.safe_load(config_str)
@@ -352,8 +350,8 @@ class Config(object):
         if not config_files:
             config_parser.error(
                 "Must supply a config file.\nA config file can be automatically"
-                " generated using \"--generate-config -H SERVER_NAME"
-                " -c CONFIG-FILE\""
+                ' generated using "--generate-config -H SERVER_NAME'
+                ' -c CONFIG-FILE"'
            )

         obj.read_config_files(

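The two config_parser.error hunks above show Black's quote normalisation: double quotes are preferred, but a literal that itself contains double quotes is rendered with single quotes so no backslash escapes are needed. Both spellings denote the same string value:

before = " generated using \"--generate-config -H SERVER_NAME"
after = ' generated using "--generate-config -H SERVER_NAME'

# Only the delimiters differ; the string contents are identical.
assert before == after
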
@@ -18,15 +18,17 @@ from ._base import Config


 class ApiConfig(Config):
-
     def read_config(self, config):
-        self.room_invite_state_types = config.get("room_invite_state_types", [
-            EventTypes.JoinRules,
-            EventTypes.CanonicalAlias,
-            EventTypes.RoomAvatar,
-            EventTypes.RoomEncryption,
-            EventTypes.Name,
-        ])
+        self.room_invite_state_types = config.get(
+            "room_invite_state_types",
+            [
+                EventTypes.JoinRules,
+                EventTypes.CanonicalAlias,
+                EventTypes.RoomAvatar,
+                EventTypes.RoomEncryption,
+                EventTypes.Name,
+            ],
+        )

     def default_config(cls, **kwargs):
         return """\
@@ -40,4 +42,6 @@ class ApiConfig(Config):
        #  - "{RoomAvatar}"
        #  - "{RoomEncryption}"
        #  - "{Name}"
-        """.format(**vars(EventTypes))
+        """.format(
+            **vars(EventTypes)
+        )

@@ -29,7 +29,6 @@ logger = logging.getLogger(__name__)


 class AppServiceConfig(Config):
-
     def read_config(self, config):
         self.app_service_config_files = config.get("app_service_config_files", [])
         self.notify_appservices = config.get("notify_appservices", True)
@@ -53,9 +52,7 @@ class AppServiceConfig(Config):
 def load_appservices(hostname, config_files):
     """Returns a list of Application Services from the config files."""
     if not isinstance(config_files, list):
-        logger.warning(
-            "Expected %s to be a list of AS config files.", config_files
-        )
+        logger.warning("Expected %s to be a list of AS config files.", config_files)
         return []

     # Dicts of value -> filename
@@ -66,22 +63,20 @@ def load_appservices(hostname, config_files):

     for config_file in config_files:
         try:
-            with open(config_file, 'r') as f:
-                appservice = _load_appservice(
-                    hostname, yaml.safe_load(f), config_file
-                )
+            with open(config_file, "r") as f:
+                appservice = _load_appservice(hostname, yaml.safe_load(f), config_file)
                 if appservice.id in seen_ids:
                     raise ConfigError(
                         "Cannot reuse ID across application services: "
-                        "%s (files: %s, %s)" % (
-                            appservice.id, config_file, seen_ids[appservice.id],
-                        )
+                        "%s (files: %s, %s)"
+                        % (appservice.id, config_file, seen_ids[appservice.id])
                     )
                 seen_ids[appservice.id] = config_file
                 if appservice.token in seen_as_tokens:
                     raise ConfigError(
                         "Cannot reuse as_token across application services: "
-                        "%s (files: %s, %s)" % (
+                        "%s (files: %s, %s)"
+                        % (
                             appservice.token,
                             config_file,
                             seen_as_tokens[appservice.token],
@@ -98,28 +93,26 @@ def load_appservices(hostname, config_files):


 def _load_appservice(hostname, as_info, config_filename):
-    required_string_fields = [
-        "id", "as_token", "hs_token", "sender_localpart"
-    ]
+    required_string_fields = ["id", "as_token", "hs_token", "sender_localpart"]
     for field in required_string_fields:
         if not isinstance(as_info.get(field), string_types):
-            raise KeyError("Required string field: '%s' (%s)" % (
-                field, config_filename,
-            ))
+            raise KeyError(
+                "Required string field: '%s' (%s)" % (field, config_filename)
+            )

     # 'url' must either be a string or explicitly null, not missing
     # to avoid accidentally turning off push for ASes.
-    if (not isinstance(as_info.get("url"), string_types) and
-            as_info.get("url", "") is not None):
+    if (
+        not isinstance(as_info.get("url"), string_types)
+        and as_info.get("url", "") is not None
+    ):
         raise KeyError(
             "Required string field or explicit null: 'url' (%s)" % (config_filename,)
         )

     localpart = as_info["sender_localpart"]
     if urlparse.quote(localpart) != localpart:
-        raise ValueError(
-            "sender_localpart needs characters which are not URL encoded."
-        )
+        raise ValueError("sender_localpart needs characters which are not URL encoded.")
     user = UserID(localpart, hostname)
     user_id = user.to_string()

@@ -138,13 +131,12 @@ def _load_appservice(hostname, as_info, config_filename):
         for regex_obj in as_info["namespaces"][ns]:
             if not isinstance(regex_obj, dict):
                 raise ValueError(
-                    "Expected namespace entry in %s to be an object,"
-                    " but got %s", ns, regex_obj
+                    "Expected namespace entry in %s to be an object," " but got %s",
+                    ns,
+                    regex_obj,
                 )
             if not isinstance(regex_obj.get("regex"), string_types):
-                raise ValueError(
-                    "Missing/bad type 'regex' key in %s", regex_obj
-                )
+                raise ValueError("Missing/bad type 'regex' key in %s", regex_obj)
             if not isinstance(regex_obj.get("exclusive"), bool):
                 raise ValueError(
                     "Missing/bad type 'exclusive' key in %s", regex_obj
@@ -167,10 +159,8 @@ def _load_appservice(hostname, as_info, config_filename):
     )

     ip_range_whitelist = None
-    if as_info.get('ip_range_whitelist'):
-        ip_range_whitelist = IPSet(
-            as_info.get('ip_range_whitelist')
-        )
+    if as_info.get("ip_range_whitelist"):
+        ip_range_whitelist = IPSet(as_info.get("ip_range_whitelist"))

     return ApplicationService(
         token=as_info["as_token"],

@@ -16,7 +16,6 @@ from ._base import Config


 class CaptchaConfig(Config):
-
     def read_config(self, config):
         self.recaptcha_private_key = config.get("recaptcha_private_key")
         self.recaptcha_public_key = config.get("recaptcha_public_key")

@@ -89,29 +89,26 @@ class ConsentConfig(Config):
         if consent_config is None:
             return
         self.user_consent_version = str(consent_config["version"])
-        self.user_consent_template_dir = self.abspath(
-            consent_config["template_dir"]
-        )
+        self.user_consent_template_dir = self.abspath(consent_config["template_dir"])
         if not path.isdir(self.user_consent_template_dir):
             raise ConfigError(
-                "Could not find template directory '%s'" % (
-                    self.user_consent_template_dir,
-                ),
+                "Could not find template directory '%s'"
+                % (self.user_consent_template_dir,)
             )
         self.user_consent_server_notice_content = consent_config.get(
-            "server_notice_content",
+            "server_notice_content"
         )
         self.block_events_without_consent_error = consent_config.get(
-            "block_events_error",
+            "block_events_error"
         )
-        self.user_consent_server_notice_to_guests = bool(consent_config.get(
-            "send_server_notice_to_guests", False,
-        ))
-        self.user_consent_at_registration = bool(consent_config.get(
-            "require_at_registration", False,
-        ))
+        self.user_consent_server_notice_to_guests = bool(
+            consent_config.get("send_server_notice_to_guests", False)
+        )
+        self.user_consent_at_registration = bool(
+            consent_config.get("require_at_registration", False)
+        )
         self.user_consent_policy_name = consent_config.get(
-            "policy_name", "Privacy Policy",
+            "policy_name", "Privacy Policy"
         )

     def default_config(self, **kwargs):

@@ -18,29 +18,21 @@ from ._base import Config


 class DatabaseConfig(Config):
-
     def read_config(self, config):
-        self.event_cache_size = self.parse_size(
-            config.get("event_cache_size", "10K")
-        )
+        self.event_cache_size = self.parse_size(config.get("event_cache_size", "10K"))

         self.database_config = config.get("database")

         if self.database_config is None:
-            self.database_config = {
-                "name": "sqlite3",
-                "args": {},
-            }
+            self.database_config = {"name": "sqlite3", "args": {}}

         name = self.database_config.get("name", None)
         if name == "psycopg2":
             pass
         elif name == "sqlite3":
-            self.database_config.setdefault("args", {}).update({
-                "cp_min": 1,
-                "cp_max": 1,
-                "check_same_thread": False,
-            })
+            self.database_config.setdefault("args", {}).update(
+                {"cp_min": 1, "cp_max": 1, "check_same_thread": False}
+            )
         else:
             raise RuntimeError("Unsupported database type '%s'" % (name,))

@@ -48,7 +40,8 @@ class DatabaseConfig(Config):

     def default_config(self, data_dir_path, **kwargs):
         database_path = os.path.join(data_dir_path, "homeserver.db")
-        return """\
+        return (
+            """\
 ## Database ##

 database:
@@ -62,7 +55,9 @@ class DatabaseConfig(Config):
 # Number of events to cache in memory.
 #
 #event_cache_size: 10K
-        """ % locals()
+        """
+            % locals()
+        )

     def read_arguments(self, args):
         self.set_databasepath(args.database_path)
@@ -77,6 +72,8 @@ class DatabaseConfig(Config):
     def add_arguments(self, parser):
         db_group = parser.add_argument_group("database")
         db_group.add_argument(
-            "-d", "--database-path", metavar="SQLITE_DATABASE_PATH",
-            help="The path to a sqlite database to use."
+            "-d",
+            "--database-path",
+            metavar="SQLITE_DATABASE_PATH",
+            help="The path to a sqlite database to use.",
         )

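The default_config hunks above show how Black handles a %-formatted multi-line template that no longer fits on the return line: the whole expression is parenthesised and the operator drops onto its own line. A self-contained sketch of the equivalence; the YAML content here is illustrative, not copied from the commit:

database_path = "/data/homeserver.db"

# Pre-Black spelling: template and % operator share the closing line.
old_form = """\
database:
  args:
    database: "%(database_path)s"
""" % locals()

# Black spelling: parenthesised expression, operator on its own line.
new_form = (
    """\
database:
  args:
    database: "%(database_path)s"
"""
    % locals()
)

assert old_form == new_form
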
@@ -56,7 +56,7 @@ class EmailConfig(Config):
         if self.email_notif_from is not None:
             # make sure it's valid
             parsed = email.utils.parseaddr(self.email_notif_from)
-            if parsed[1] == '':
+            if parsed[1] == "":
                 raise RuntimeError("Invalid notif_from address")

         template_dir = email_config.get("template_dir")
@@ -65,19 +65,17 @@ class EmailConfig(Config):
         # (Note that loading as package_resources with jinja.PackageLoader doesn't
         # work for the same reason.)
         if not template_dir:
-            template_dir = pkg_resources.resource_filename(
-                'synapse', 'res/templates'
-            )
+            template_dir = pkg_resources.resource_filename("synapse", "res/templates")

         self.email_template_dir = os.path.abspath(template_dir)

         self.email_enable_notifs = email_config.get("enable_notifs", False)
-        account_validity_renewal_enabled = config.get(
-            "account_validity", {},
-        ).get("renew_at")
+        account_validity_renewal_enabled = config.get("account_validity", {}).get(
+            "renew_at"
+        )

         email_trust_identity_server_for_password_resets = email_config.get(
-            "trust_identity_server_for_password_resets", False,
+            "trust_identity_server_for_password_resets", False
         )
         self.email_password_reset_behaviour = (
             "remote" if email_trust_identity_server_for_password_resets else "local"
@@ -103,62 +101,59 @@ class EmailConfig(Config):
             # make sure we can import the required deps
             import jinja2
             import bleach

             # prevent unused warnings
             jinja2
             bleach

         if self.email_password_reset_behaviour == "local":
-            required = [
-                "smtp_host",
-                "smtp_port",
-                "notif_from",
-            ]
+            required = ["smtp_host", "smtp_port", "notif_from"]

             missing = []
             for k in required:
                 if k not in email_config:
                     missing.append(k)

-            if (len(missing) > 0):
+            if len(missing) > 0:
                 raise RuntimeError(
                     "email.password_reset_behaviour is set to 'local' "
-                    "but required keys are missing: %s" %
-                    (", ".join(["email." + k for k in missing]),)
+                    "but required keys are missing: %s"
+                    % (", ".join(["email." + k for k in missing]),)
                 )

             # Templates for password reset emails
             self.email_password_reset_template_html = email_config.get(
-                "password_reset_template_html", "password_reset.html",
+                "password_reset_template_html", "password_reset.html"
             )
             self.email_password_reset_template_text = email_config.get(
-                "password_reset_template_text", "password_reset.txt",
+                "password_reset_template_text", "password_reset.txt"
             )
             self.email_password_reset_failure_template = email_config.get(
-                "password_reset_failure_template", "password_reset_failure.html",
+                "password_reset_failure_template", "password_reset_failure.html"
             )
             # This template does not support any replaceable variables, so we will
             # read it from the disk once during setup
             email_password_reset_success_template = email_config.get(
-                "password_reset_success_template", "password_reset_success.html",
+                "password_reset_success_template", "password_reset_success.html"
             )

             # Check templates exist
-            for f in [self.email_password_reset_template_html,
-                      self.email_password_reset_template_text,
-                      self.email_password_reset_failure_template,
-                      email_password_reset_success_template]:
+            for f in [
+                self.email_password_reset_template_html,
+                self.email_password_reset_template_text,
+                self.email_password_reset_failure_template,
+                email_password_reset_success_template,
+            ]:
                 p = os.path.join(self.email_template_dir, f)
                 if not os.path.isfile(p):
-                    raise ConfigError("Unable to find template file %s" % (p, ))
+                    raise ConfigError("Unable to find template file %s" % (p,))

             # Retrieve content of web templates
             filepath = os.path.join(
-                self.email_template_dir,
-                email_password_reset_success_template,
+                self.email_template_dir, email_password_reset_success_template
             )
             self.email_password_reset_success_html_content = self.read_file(
-                filepath,
-                "email.password_reset_template_success_html",
+                filepath, "email.password_reset_template_success_html"
             )

         if config.get("public_baseurl") is None:
@@ -182,10 +177,10 @@ class EmailConfig(Config):
                 if k not in email_config:
                     missing.append(k)

-            if (len(missing) > 0):
+            if len(missing) > 0:
                 raise RuntimeError(
-                    "email.enable_notifs is True but required keys are missing: %s" %
-                    (", ".join(["email." + k for k in missing]),)
+                    "email.enable_notifs is True but required keys are missing: %s"
+                    % (", ".join(["email." + k for k in missing]),)
                 )

             if config.get("public_baseurl") is None:
@@ -199,27 +194,25 @@ class EmailConfig(Config):
             for f in self.email_notif_template_text, self.email_notif_template_html:
                 p = os.path.join(self.email_template_dir, f)
                 if not os.path.isfile(p):
-                    raise ConfigError("Unable to find email template file %s" % (p, ))
+                    raise ConfigError("Unable to find email template file %s" % (p,))

             self.email_notif_for_new_users = email_config.get(
                 "notif_for_new_users", True
             )
-            self.email_riot_base_url = email_config.get(
-                "riot_base_url", None
-            )
+            self.email_riot_base_url = email_config.get("riot_base_url", None)

         if account_validity_renewal_enabled:
             self.email_expiry_template_html = email_config.get(
-                "expiry_template_html", "notice_expiry.html",
+                "expiry_template_html", "notice_expiry.html"
             )
             self.email_expiry_template_text = email_config.get(
-                "expiry_template_text", "notice_expiry.txt",
+                "expiry_template_text", "notice_expiry.txt"
             )

             for f in self.email_expiry_template_text, self.email_expiry_template_html:
                 p = os.path.join(self.email_template_dir, f)
                 if not os.path.isfile(p):
-                    raise ConfigError("Unable to find email template file %s" % (p, ))
+                    raise ConfigError("Unable to find email template file %s" % (p,))

     def default_config(self, config_dir_path, server_name, **kwargs):
         return """

@@ -15,13 +15,11 @@

 from ._base import Config, ConfigError

-MISSING_JWT = (
-    """Missing jwt library. This is required for jwt login.
+MISSING_JWT = """Missing jwt library. This is required for jwt login.

     Install by running:
         pip install pyjwt
     """
-)


 class JWTConfig(Config):
@@ -34,6 +32,7 @@ class JWTConfig(Config):

         try:
             import jwt
+
             jwt  # To stop unused lint.
         except ImportError:
             raise ConfigError(MISSING_JWT)

@@ -348,9 +348,8 @@ def _parse_key_servers(key_servers, federation_verify_certificates):

             result.verify_keys[key_id] = verify_key

-        if (
-            not federation_verify_certificates and
-            not server.get("accept_keys_insecurely")
+        if not federation_verify_certificates and not server.get(
+            "accept_keys_insecurely"
         ):
             _assert_keyserver_has_verify_keys(result)

@@ -29,7 +29,8 @@ from synapse.util.versionstring import get_version_string

 from ._base import Config

-DEFAULT_LOG_CONFIG = Template("""
+DEFAULT_LOG_CONFIG = Template(
+    """
 version: 1

 formatters:

@@ -68,11 +69,11 @@ loggers:
 root:
     level: INFO
     handlers: [file, console]
-""")
+"""
+)


 class LoggingConfig(Config):
-
     def read_config(self, config):
         self.verbosity = config.get("verbose", 0)
         self.no_redirect_stdio = config.get("no_redirect_stdio", False)

@@ -81,13 +82,16 @@ class LoggingConfig(Config):

     def default_config(self, config_dir_path, server_name, **kwargs):
         log_config = os.path.join(config_dir_path, server_name + ".log.config")
-        return """\
+        return (
+            """\
 ## Logging ##

 # A yaml python logging config file
 #
 log_config: "%(log_config)s"
-""" % locals()
+"""
+            % locals()
+        )

     def read_arguments(self, args):
         if args.verbose is not None:

@@ -102,22 +106,31 @@ class LoggingConfig(Config):
     def add_arguments(cls, parser):
         logging_group = parser.add_argument_group("logging")
         logging_group.add_argument(
-            '-v', '--verbose', dest="verbose", action='count',
+            "-v",
+            "--verbose",
+            dest="verbose",
+            action="count",
             help="The verbosity level. Specify multiple times to increase "
-            "verbosity. (Ignored if --log-config is specified.)"
+            "verbosity. (Ignored if --log-config is specified.)",
         )
         logging_group.add_argument(
-            '-f', '--log-file', dest="log_file",
-            help="File to log to. (Ignored if --log-config is specified.)"
+            "-f",
+            "--log-file",
+            dest="log_file",
+            help="File to log to. (Ignored if --log-config is specified.)",
         )
         logging_group.add_argument(
-            '--log-config', dest="log_config", default=None,
-            help="Python logging config file"
+            "--log-config",
+            dest="log_config",
+            default=None,
+            help="Python logging config file",
         )
         logging_group.add_argument(
-            '-n', '--no-redirect-stdio',
-            action='store_true', default=None,
-            help="Do not redirect stdout/stderr to the log"
+            "-n",
+            "--no-redirect-stdio",
+            action="store_true",
+            default=None,
+            help="Do not redirect stdout/stderr to the log",
         )

     def generate_files(self, config):

@@ -125,9 +138,7 @@ class LoggingConfig(Config):
         if log_config and not os.path.exists(log_config):
             log_file = self.abspath("homeserver.log")
             with open(log_config, "w") as log_config_file:
-                log_config_file.write(
-                    DEFAULT_LOG_CONFIG.substitute(log_file=log_file)
-                )
+                log_config_file.write(DEFAULT_LOG_CONFIG.substitute(log_file=log_file))


 def setup_logging(config, use_worker_options=False):

@@ -143,10 +154,8 @@ def setup_logging(config, use_worker_options=False):
         register_sighup (func | None): Function to call to register a
             sighup handler.
     """
-    log_config = (config.worker_log_config if use_worker_options
-                  else config.log_config)
-    log_file = (config.worker_log_file if use_worker_options
-                else config.log_file)
+    log_config = config.worker_log_config if use_worker_options else config.log_config
+    log_file = config.worker_log_file if use_worker_options else config.log_file

     log_format = (
         "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(request)s"

@@ -164,23 +173,23 @@ def setup_logging(config, use_worker_options=False):
     if config.verbosity > 1:
         level_for_storage = logging.DEBUG

-    logger = logging.getLogger('')
+    logger = logging.getLogger("")
     logger.setLevel(level)

-    logging.getLogger('synapse.storage.SQL').setLevel(level_for_storage)
+    logging.getLogger("synapse.storage.SQL").setLevel(level_for_storage)

     formatter = logging.Formatter(log_format)
     if log_file:
         # TODO: Customisable file size / backup count
         handler = logging.handlers.RotatingFileHandler(
-            log_file, maxBytes=(1000 * 1000 * 100), backupCount=3,
-            encoding='utf8'
+            log_file, maxBytes=(1000 * 1000 * 100), backupCount=3, encoding="utf8"
         )

         def sighup(signum, stack):
             logger.info("Closing log file due to SIGHUP")
             handler.doRollover()
             logger.info("Opened new log file due to SIGHUP")
-
     else:
         handler = logging.StreamHandler()

@@ -193,8 +202,9 @@ def setup_logging(config, use_worker_options=False):

         logger.addHandler(handler)
     else:
+
         def load_log_config():
-            with open(log_config, 'r') as f:
+            with open(log_config, "r") as f:
                 logging.config.dictConfig(yaml.safe_load(f))

         def sighup(*args):

@@ -209,10 +219,7 @@ def setup_logging(config, use_worker_options=False):
     # make sure that the first thing we log is a thing we can grep backwards
    # for
     logging.warn("***** STARTING SERVER *****")
-    logging.warn(
-        "Server %s version %s",
-        sys.argv[0], get_version_string(synapse),
-    )
+    logging.warn("Server %s version %s", sys.argv[0], get_version_string(synapse))
     logging.info("Server hostname: %s", config.server_name)

     # It's critical to point twisted's internal logging somewhere, otherwise it

@@ -242,8 +249,7 @@ def setup_logging(config, use_worker_options=False):
             return observer(event)

     globalLogBeginner.beginLoggingTo(
-        [_log],
-        redirectStandardIO=not config.no_redirect_stdio,
+        [_log], redirectStandardIO=not config.no_redirect_stdio
     )
     if not config.no_redirect_stdio:
         print("Redirected stdout/stderr to logs")
@@ -15,11 +15,9 @@

 from ._base import Config, ConfigError

-MISSING_SENTRY = (
-    """Missing sentry-sdk library. This is required to enable sentry
+MISSING_SENTRY = """Missing sentry-sdk library. This is required to enable sentry
     integration.
-    """
-)
+"""


 class MetricsConfig(Config):

@@ -39,7 +37,7 @@ class MetricsConfig(Config):
             self.sentry_dsn = config["sentry"].get("dsn")
             if not self.sentry_dsn:
                 raise ConfigError(
-                    "sentry.dsn field is required when sentry integration is enabled",
+                    "sentry.dsn field is required when sentry integration is enabled"
                 )

     def default_config(self, report_stats=None, **kwargs):

@@ -66,6 +64,6 @@ class MetricsConfig(Config):
         if report_stats is None:
             res += "# report_stats: true|false\n"
         else:
-            res += "report_stats: %s\n" % ('true' if report_stats else 'false')
+            res += "report_stats: %s\n" % ("true" if report_stats else "false")

         return res
@@ -17,7 +17,7 @@ from synapse.util.module_loader import load_module

 from ._base import Config

-LDAP_PROVIDER = 'ldap_auth_provider.LdapAuthProvider'
+LDAP_PROVIDER = "ldap_auth_provider.LdapAuthProvider"


 class PasswordAuthProviderConfig(Config):

@@ -29,24 +29,20 @@ class PasswordAuthProviderConfig(Config):
         # param.
         ldap_config = config.get("ldap_config", {})
         if ldap_config.get("enabled", False):
-            providers.append({
-                'module': LDAP_PROVIDER,
-                'config': ldap_config,
-            })
+            providers.append({"module": LDAP_PROVIDER, "config": ldap_config})

         providers.extend(config.get("password_providers", []))
         for provider in providers:
-            mod_name = provider['module']
+            mod_name = provider["module"]

             # This is for backwards compat when the ldap auth provider resided
             # in this package.
             if mod_name == "synapse.util.ldap_auth_provider.LdapAuthProvider":
                 mod_name = LDAP_PROVIDER

-            (provider_class, provider_config) = load_module({
-                "module": mod_name,
-                "config": provider['config'],
-            })
+            (provider_class, provider_config) = load_module(
+                {"module": mod_name, "config": provider["config"]}
+            )

             self.password_providers.append((provider_class, provider_config))
@@ -23,7 +23,7 @@ from synapse.util.stringutils import random_string_with_symbols

 class AccountValidityConfig(Config):
     def __init__(self, config, synapse_config):
         self.enabled = config.get("enabled", False)
-        self.renew_by_email_enabled = ("renew_at" in config)
+        self.renew_by_email_enabled = "renew_at" in config

         if self.enabled:
             if "period" in config:

@@ -39,14 +39,13 @@ class AccountValidityConfig(Config):
             else:
                 self.renew_email_subject = "Renew your %(app)s account"

-            self.startup_job_max_delta = self.period * 10. / 100.
+            self.startup_job_max_delta = self.period * 10.0 / 100.0

         if self.renew_by_email_enabled and "public_baseurl" not in synapse_config:
             raise ConfigError("Can't send renewal emails without 'public_baseurl'")


 class RegistrationConfig(Config):
-
     def read_config(self, config):
         self.enable_registration = bool(
             strtobool(str(config.get("enable_registration", False)))

@@ -57,7 +56,7 @@ class RegistrationConfig(Config):
         )

         self.account_validity = AccountValidityConfig(
-            config.get("account_validity", {}), config,
+            config.get("account_validity", {}), config
         )

         self.registrations_require_3pid = config.get("registrations_require_3pid", [])

@@ -67,24 +66,23 @@ class RegistrationConfig(Config):

         self.bcrypt_rounds = config.get("bcrypt_rounds", 12)
         self.trusted_third_party_id_servers = config.get(
-            "trusted_third_party_id_servers",
-            ["matrix.org", "vector.im"],
+            "trusted_third_party_id_servers", ["matrix.org", "vector.im"]
         )
         self.default_identity_server = config.get("default_identity_server")
         self.allow_guest_access = config.get("allow_guest_access", False)

-        self.invite_3pid_guest = (
-            self.allow_guest_access and config.get("invite_3pid_guest", False)
+        self.invite_3pid_guest = self.allow_guest_access and config.get(
+            "invite_3pid_guest", False
         )

         self.auto_join_rooms = config.get("auto_join_rooms", [])
         for room_alias in self.auto_join_rooms:
             if not RoomAlias.is_valid(room_alias):
-                raise ConfigError('Invalid auto_join_rooms entry %s' % (room_alias,))
+                raise ConfigError("Invalid auto_join_rooms entry %s" % (room_alias,))
         self.autocreate_auto_join_rooms = config.get("autocreate_auto_join_rooms", True)

-        self.disable_msisdn_registration = (
-            config.get("disable_msisdn_registration", False)
+        self.disable_msisdn_registration = config.get(
+            "disable_msisdn_registration", False
         )

     def default_config(self, generate_secrets=False, **kwargs):

@@ -93,9 +91,12 @@ class RegistrationConfig(Config):
                 random_string_with_symbols(50),
             )
         else:
-            registration_shared_secret = '# registration_shared_secret: <PRIVATE STRING>'
+            registration_shared_secret = (
+                "# registration_shared_secret: <PRIVATE STRING>"
+            )

-        return """\
+        return (
+            """\
 ## Registration ##
 #
 # Registration can be rate-limited using the parameters in the "Ratelimiting"

@@ -217,17 +218,19 @@ class RegistrationConfig(Config):
        # users cannot be auto-joined since they do not exist.
        #
        #autocreate_auto_join_rooms: true
-        """ % locals()
+        """
+            % locals()
+        )

     def add_arguments(self, parser):
         reg_group = parser.add_argument_group("registration")
         reg_group.add_argument(
-            "--enable-registration", action="store_true", default=None,
-            help="Enable registration for new users."
+            "--enable-registration",
+            action="store_true",
+            default=None,
+            help="Enable registration for new users.",
         )

     def read_arguments(self, args):
         if args.enable_registration is not None:
-            self.enable_registration = bool(
-                strtobool(str(args.enable_registration))
-            )
+            self.enable_registration = bool(strtobool(str(args.enable_registration)))
@@ -20,27 +20,11 @@ from synapse.util.module_loader import load_module

 from ._base import Config, ConfigError

 DEFAULT_THUMBNAIL_SIZES = [
-    {
-        "width": 32,
-        "height": 32,
-        "method": "crop",
-    }, {
-        "width": 96,
-        "height": 96,
-        "method": "crop",
-    }, {
-        "width": 320,
-        "height": 240,
-        "method": "scale",
-    }, {
-        "width": 640,
-        "height": 480,
-        "method": "scale",
-    }, {
-        "width": 800,
-        "height": 600,
-        "method": "scale"
-    },
+    {"width": 32, "height": 32, "method": "crop"},
+    {"width": 96, "height": 96, "method": "crop"},
+    {"width": 320, "height": 240, "method": "scale"},
+    {"width": 640, "height": 480, "method": "scale"},
+    {"width": 800, "height": 600, "method": "scale"},
 ]

 THUMBNAIL_SIZE_YAML = """\

@@ -49,19 +33,15 @@ THUMBNAIL_SIZE_YAML = """\
 #    method: %(method)s
 """

-MISSING_NETADDR = (
-    "Missing netaddr library. This is required for URL preview API."
-)
+MISSING_NETADDR = "Missing netaddr library. This is required for URL preview API."

-MISSING_LXML = (
-    """Missing lxml library. This is required for URL preview API.
+MISSING_LXML = """Missing lxml library. This is required for URL preview API.

     Install by running:
         pip install lxml

     Requires libxslt1-dev system package.
-    """
-)
+"""


 ThumbnailRequirement = namedtuple(

@@ -69,7 +49,8 @@ ThumbnailRequirement = namedtuple(
 )

 MediaStorageProviderConfig = namedtuple(
-    "MediaStorageProviderConfig", (
+    "MediaStorageProviderConfig",
+    (
         "store_local",  # Whether to store newly uploaded local files
         "store_remote",  # Whether to store newly downloaded remote files
         "store_synchronous",  # Whether to wait for successful storage for local uploads

@@ -100,8 +81,7 @@ def parse_thumbnail_requirements(thumbnail_sizes):
         requirements.setdefault("image/gif", []).append(png_thumbnail)
         requirements.setdefault("image/png", []).append(png_thumbnail)
     return {
-        media_type: tuple(thumbnails)
-        for media_type, thumbnails in requirements.items()
+        media_type: tuple(thumbnails) for media_type, thumbnails in requirements.items()
     }

@@ -127,15 +107,15 @@ class ContentRepositoryConfig(Config):
                     "Cannot use both 'backup_media_store_path' and 'storage_providers'"
                 )

-            storage_providers = [{
-                "module": "file_system",
-                "store_local": True,
-                "store_synchronous": synchronous_backup_media_store,
-                "store_remote": True,
-                "config": {
-                    "directory": backup_media_store_path,
+            storage_providers = [
+                {
+                    "module": "file_system",
+                    "store_local": True,
+                    "store_synchronous": synchronous_backup_media_store,
+                    "store_remote": True,
+                    "config": {"directory": backup_media_store_path},
                 }
-            }]
+            ]

         # This is a list of config that can be used to create the storage
         # providers. The entries are tuples of (Class, class_config,

@@ -165,18 +145,19 @@ class ContentRepositoryConfig(Config):
             )

             self.media_storage_providers.append(
-                (provider_class, parsed_config, wrapper_config,)
+                (provider_class, parsed_config, wrapper_config)
             )

         self.uploads_path = self.ensure_directory(config["uploads_path"])
         self.dynamic_thumbnails = config.get("dynamic_thumbnails", False)
         self.thumbnail_requirements = parse_thumbnail_requirements(
-            config.get("thumbnail_sizes", DEFAULT_THUMBNAIL_SIZES),
+            config.get("thumbnail_sizes", DEFAULT_THUMBNAIL_SIZES)
         )
         self.url_preview_enabled = config.get("url_preview_enabled", False)
         if self.url_preview_enabled:
             try:
                 import lxml
+
                 lxml  # To stop unused lint.
             except ImportError:
                 raise ConfigError(MISSING_LXML)

@@ -199,15 +180,13 @@ class ContentRepositoryConfig(Config):

             # we always blacklist '0.0.0.0' and '::', which are supposed to be
             # unroutable addresses.
-            self.url_preview_ip_range_blacklist.update(['0.0.0.0', '::'])
+            self.url_preview_ip_range_blacklist.update(["0.0.0.0", "::"])

             self.url_preview_ip_range_whitelist = IPSet(
                 config.get("url_preview_ip_range_whitelist", ())
             )

-            self.url_preview_url_blacklist = config.get(
-                "url_preview_url_blacklist", ()
-            )
+            self.url_preview_url_blacklist = config.get("url_preview_url_blacklist", ())

     def default_config(self, data_dir_path, **kwargs):
         media_store = os.path.join(data_dir_path, "media_store")

@@ -219,7 +198,8 @@ class ContentRepositoryConfig(Config):
         # strip final NL
         formatted_thumbnail_sizes = formatted_thumbnail_sizes[:-1]

-        return r"""
+        return (
+            r"""
 # Directory where uploaded images and attachments are stored.
 #
 media_store_path: "%(media_store)s"

@@ -342,4 +322,6 @@ class ContentRepositoryConfig(Config):
 # The largest allowed URL preview spidering size in bytes
 #
 #max_spider_size: 10M
-""" % locals()
+"""
+            % locals()
+        )
@@ -20,9 +20,7 @@ from ._base import Config, ConfigError

 class RoomDirectoryConfig(Config):
     def read_config(self, config):
-        self.enable_room_list_search = config.get(
-            "enable_room_list_search", True,
-        )
+        self.enable_room_list_search = config.get("enable_room_list_search", True)

         alias_creation_rules = config.get("alias_creation_rules")

@@ -33,11 +31,7 @@ class RoomDirectoryConfig(Config):
             ]
         else:
             self._alias_creation_rules = [
-                _RoomDirectoryRule(
-                    "alias_creation_rules", {
-                        "action": "allow",
-                    }
-                )
+                _RoomDirectoryRule("alias_creation_rules", {"action": "allow"})
             ]

         room_list_publication_rules = config.get("room_list_publication_rules")

@@ -49,11 +43,7 @@ class RoomDirectoryConfig(Config):
             ]
         else:
             self._room_list_publication_rules = [
-                _RoomDirectoryRule(
-                    "room_list_publication_rules", {
-                        "action": "allow",
-                    }
-                )
+                _RoomDirectoryRule("room_list_publication_rules", {"action": "allow"})
             ]

     def default_config(self, config_dir_path, server_name, **kwargs):

@@ -178,8 +168,7 @@ class _RoomDirectoryRule(object):
             self.action = action
         else:
             raise ConfigError(
-                "%s rules can only have action of 'allow'"
-                " or 'deny'" % (option_name,)
+                "%s rules can only have action of 'allow'" " or 'deny'" % (option_name,)
             )

         self._alias_matches_all = alias == "*"
@@ -28,6 +28,7 @@ class SAML2Config(Config):
         self.saml2_enabled = True

         import saml2.config
+
         self.saml2_sp_config = saml2.config.SPConfig()
         self.saml2_sp_config.load(self._default_saml_config_dict())
         self.saml2_sp_config.load(saml2_config.get("sp_config", {}))

@@ -41,26 +42,23 @@ class SAML2Config(Config):

         public_baseurl = self.public_baseurl
         if public_baseurl is None:
-            raise ConfigError(
-                "saml2_config requires a public_baseurl to be set"
-            )
+            raise ConfigError("saml2_config requires a public_baseurl to be set")

         metadata_url = public_baseurl + "_matrix/saml2/metadata.xml"
         response_url = public_baseurl + "_matrix/saml2/authn_response"
         return {
             "entityid": metadata_url,
-
             "service": {
                 "sp": {
                     "endpoints": {
                         "assertion_consumer_service": [
-                            (response_url, saml2.BINDING_HTTP_POST),
-                        ],
+                            (response_url, saml2.BINDING_HTTP_POST)
+                        ]
                     },
                     "required_attributes": ["uid"],
                     "optional_attributes": ["mail", "surname", "givenname"],
-                },
-            }
+                }
+            },
         }

     def default_config(self, config_dir_path, server_name, **kwargs):

@@ -106,4 +104,6 @@ class SAML2Config(Config):
        #   # separate pysaml2 configuration file:
        #   #
        #   config_path: "%(config_dir_path)s/sp_conf.py"
-        """ % {"config_dir_path": config_dir_path}
+        """ % {
+            "config_dir_path": config_dir_path
+        }
@@ -34,13 +34,12 @@ logger = logging.Logger(__name__)
 #
 # We later check for errors when binding to 0.0.0.0 and ignore them if :: is also in
 # in the list.
-DEFAULT_BIND_ADDRESSES = ['::', '0.0.0.0']
+DEFAULT_BIND_ADDRESSES = ["::", "0.0.0.0"]

 DEFAULT_ROOM_VERSION = "4"


 class ServerConfig(Config):
-
     def read_config(self, config):
         self.server_name = config["server_name"]
         self.server_context = config.get("server_context", None)

@@ -81,27 +80,25 @@ class ServerConfig(Config):
         # Whether to require authentication to retrieve profile data (avatars,
         # display names) of other users through the client API.
         self.require_auth_for_profile_requests = config.get(
-            "require_auth_for_profile_requests", False,
+            "require_auth_for_profile_requests", False
         )

         # If set to 'True', requires authentication to access the server's
         # public rooms directory through the client API, and forbids any other
         # homeserver to fetch it via federation.
         self.restrict_public_rooms_to_local_users = config.get(
-            "restrict_public_rooms_to_local_users", False,
+            "restrict_public_rooms_to_local_users", False
         )

-        default_room_version = config.get(
-            "default_room_version", DEFAULT_ROOM_VERSION,
-        )
+        default_room_version = config.get("default_room_version", DEFAULT_ROOM_VERSION)

         # Ensure room version is a str
         default_room_version = str(default_room_version)

         if default_room_version not in KNOWN_ROOM_VERSIONS:
             raise ConfigError(
-                "Unknown default_room_version: %s, known room versions: %s" %
-                (default_room_version, list(KNOWN_ROOM_VERSIONS.keys()))
+                "Unknown default_room_version: %s, known room versions: %s"
+                % (default_room_version, list(KNOWN_ROOM_VERSIONS.keys()))
             )

         # Get the actual room version object rather than just the identifier

@@ -116,31 +113,25 @@ class ServerConfig(Config):

         # Whether we should block invites sent to users on this server
         # (other than those sent by local server admins)
-        self.block_non_admin_invites = config.get(
-            "block_non_admin_invites", False,
-        )
+        self.block_non_admin_invites = config.get("block_non_admin_invites", False)

         # Whether to enable experimental MSC1849 (aka relations) support
         self.experimental_msc1849_support_enabled = config.get(
-            "experimental_msc1849_support_enabled", False,
+            "experimental_msc1849_support_enabled", False
         )

         # Options to control access by tracking MAU
         self.limit_usage_by_mau = config.get("limit_usage_by_mau", False)
         self.max_mau_value = 0
         if self.limit_usage_by_mau:
-            self.max_mau_value = config.get(
-                "max_mau_value", 0,
-            )
+            self.max_mau_value = config.get("max_mau_value", 0)
         self.mau_stats_only = config.get("mau_stats_only", False)

         self.mau_limits_reserved_threepids = config.get(
             "mau_limit_reserved_threepids", []
         )

-        self.mau_trial_days = config.get(
-            "mau_trial_days", 0,
-        )
+        self.mau_trial_days = config.get("mau_trial_days", 0)

         # Options to disable HS
         self.hs_disabled = config.get("hs_disabled", False)

@@ -153,9 +144,7 @@ class ServerConfig(Config):

         # FIXME: federation_domain_whitelist needs sytests
         self.federation_domain_whitelist = None
-        federation_domain_whitelist = config.get(
-            "federation_domain_whitelist", None,
-        )
+        federation_domain_whitelist = config.get("federation_domain_whitelist", None)

         if federation_domain_whitelist is not None:
             # turn the whitelist into a hash for speed of lookup

@@ -165,7 +154,7 @@ class ServerConfig(Config):
                 self.federation_domain_whitelist[domain] = True

         self.federation_ip_range_blacklist = config.get(
-            "federation_ip_range_blacklist", [],
+            "federation_ip_range_blacklist", []
         )

         # Attempt to create an IPSet from the given ranges

@@ -178,13 +167,12 @@ class ServerConfig(Config):
             self.federation_ip_range_blacklist.update(["0.0.0.0", "::"])
         except Exception as e:
             raise ConfigError(
-                "Invalid range(s) provided in "
-                "federation_ip_range_blacklist: %s" % e
+                "Invalid range(s) provided in " "federation_ip_range_blacklist: %s" % e
             )

         if self.public_baseurl is not None:
-            if self.public_baseurl[-1] != '/':
-                self.public_baseurl += '/'
+            if self.public_baseurl[-1] != "/":
+                self.public_baseurl += "/"
         self.start_pushers = config.get("start_pushers", True)

         # (undocumented) option for torturing the worker-mode replication a bit,

@@ -195,7 +183,7 @@ class ServerConfig(Config):
         # Whether to require a user to be in the room to add an alias to it.
         # Defaults to True.
         self.require_membership_for_aliases = config.get(
-            "require_membership_for_aliases", True,
+            "require_membership_for_aliases", True
         )

         # Whether to allow per-room membership profiles through the send of membership

@@ -227,9 +215,9 @@ class ServerConfig(Config):

             # if we still have an empty list of addresses, use the default list
             if not bind_addresses:
-                if listener['type'] == 'metrics':
+                if listener["type"] == "metrics":
                     # the metrics listener doesn't support IPv6
-                    bind_addresses.append('0.0.0.0')
+                    bind_addresses.append("0.0.0.0")
                 else:
                     bind_addresses.extend(DEFAULT_BIND_ADDRESSES)

@@ -249,78 +237,72 @@ class ServerConfig(Config):
             bind_host = config.get("bind_host", "")
             gzip_responses = config.get("gzip_responses", True)

-            self.listeners.append({
-                "port": bind_port,
-                "bind_addresses": [bind_host],
-                "tls": True,
-                "type": "http",
-                "resources": [
-                    {
-                        "names": ["client"],
-                        "compress": gzip_responses,
-                    },
-                    {
-                        "names": ["federation"],
-                        "compress": False,
-                    }
-                ]
-            })
+            self.listeners.append(
+                {
+                    "port": bind_port,
+                    "bind_addresses": [bind_host],
+                    "tls": True,
+                    "type": "http",
+                    "resources": [
+                        {"names": ["client"], "compress": gzip_responses},
+                        {"names": ["federation"], "compress": False},
+                    ],
+                }
+            )

             unsecure_port = config.get("unsecure_port", bind_port - 400)
             if unsecure_port:
-                self.listeners.append({
-                    "port": unsecure_port,
-                    "bind_addresses": [bind_host],
-                    "tls": False,
-                    "type": "http",
-                    "resources": [
-                        {
-                            "names": ["client"],
-                            "compress": gzip_responses,
-                        },
-                        {
-                            "names": ["federation"],
-                            "compress": False,
-                        }
-                    ]
-                })
+                self.listeners.append(
+                    {
+                        "port": unsecure_port,
+                        "bind_addresses": [bind_host],
+                        "tls": False,
+                        "type": "http",
+                        "resources": [
+                            {"names": ["client"], "compress": gzip_responses},
+                            {"names": ["federation"], "compress": False},
+                        ],
+                    }
+                )

         manhole = config.get("manhole")
         if manhole:
-            self.listeners.append({
-                "port": manhole,
-                "bind_addresses": ["127.0.0.1"],
-                "type": "manhole",
-                "tls": False,
-            })
+            self.listeners.append(
+                {
+                    "port": manhole,
+                    "bind_addresses": ["127.0.0.1"],
+                    "type": "manhole",
+                    "tls": False,
+                }
+            )

         metrics_port = config.get("metrics_port")
         if metrics_port:
             logger.warn(
-                ("The metrics_port configuration option is deprecated in Synapse 0.31 "
-                 "in favour of a listener. Please see "
-                 "http://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.rst"
-                 " on how to configure the new listener."))
+                (
+                    "The metrics_port configuration option is deprecated in Synapse 0.31 "
+                    "in favour of a listener. Please see "
+                    "http://github.com/matrix-org/synapse/blob/master/docs/metrics-howto.rst"
+                    " on how to configure the new listener."
+                )
+            )

-            self.listeners.append({
-                "port": metrics_port,
-                "bind_addresses": [config.get("metrics_bind_host", "127.0.0.1")],
-                "tls": False,
-                "type": "http",
-                "resources": [
-                    {
-                        "names": ["metrics"],
-                        "compress": False,
-                    },
-                ]
-            })
+            self.listeners.append(
+                {
+                    "port": metrics_port,
+                    "bind_addresses": [config.get("metrics_bind_host", "127.0.0.1")],
+                    "tls": False,
+                    "type": "http",
+                    "resources": [{"names": ["metrics"], "compress": False}],
+                }
+            )

         _check_resource_config(self.listeners)

         # An experimental option to try and periodically clean up extremities
         # by sending dummy events.
         self.cleanup_extremities_with_dummy_events = config.get(
-            "cleanup_extremities_with_dummy_events", False,
+            "cleanup_extremities_with_dummy_events", False
         )

     def has_tls_listener(self):

@@ -339,7 +321,8 @@ class ServerConfig(Config):
         # Bring DEFAULT_ROOM_VERSION into the local-scope for use in the
        # default config string
         default_room_version = DEFAULT_ROOM_VERSION
-        return """\
+        return (
+            """\
 ## Server ##

 # The domain name of the server, with optional explicit port.

@@ -637,7 +620,9 @@ class ServerConfig(Config):
        # Defaults to 'true'.
        #
        #allow_per_room_profiles: false
-        """ % locals()
+        """
+            % locals()
+        )

     def read_arguments(self, args):
         if args.manhole is not None:

@@ -649,17 +634,26 @@ class ServerConfig(Config):

     def add_arguments(self, parser):
         server_group = parser.add_argument_group("server")
-        server_group.add_argument("-D", "--daemonize", action='store_true',
-                                  default=None,
-                                  help="Daemonize the home server")
-        server_group.add_argument("--print-pidfile", action='store_true',
-                                  default=None,
-                                  help="Print the path to the pidfile just"
-                                  " before daemonizing")
-        server_group.add_argument("--manhole", metavar="PORT", dest="manhole",
-                                  type=int,
-                                  help="Turn on the twisted telnet manhole"
-                                  " service on the given port.")
+        server_group.add_argument(
+            "-D",
+            "--daemonize",
+            action="store_true",
+            default=None,
+            help="Daemonize the home server",
+        )
+        server_group.add_argument(
+            "--print-pidfile",
+            action="store_true",
+            default=None,
+            help="Print the path to the pidfile just" " before daemonizing",
+        )
+        server_group.add_argument(
+            "--manhole",
+            metavar="PORT",
+            dest="manhole",
+            type=int,
+            help="Turn on the twisted telnet manhole" " service on the given port.",
+        )


 def is_threepid_reserved(reserved_threepids, threepid):

@@ -673,7 +667,7 @@ def is_threepid_reserved(reserved_threepids, threepid):
     """

     for tp in reserved_threepids:
-        if (threepid['medium'] == tp['medium'] and threepid['address'] == tp['address']):
+        if threepid["medium"] == tp["medium"] and threepid["address"] == tp["address"]:
             return True
     return False

@@ -686,9 +680,7 @@ def read_gc_thresholds(thresholds):
         return None
     try:
         assert len(thresholds) == 3
-        return (
-            int(thresholds[0]), int(thresholds[1]), int(thresholds[2]),
-        )
+        return (int(thresholds[0]), int(thresholds[1]), int(thresholds[2]))
     except Exception:
         raise ConfigError(
             "Value of `gc_threshold` must be a list of three integers if set"

@@ -706,22 +698,22 @@ def _warn_if_webclient_configured(listeners):
     for listener in listeners:
         for res in listener.get("resources", []):
             for name in res.get("names", []):
-                if name == 'webclient':
+                if name == "webclient":
                     logger.warning(NO_MORE_WEB_CLIENT_WARNING)
                     return


 KNOWN_RESOURCES = (
-    'client',
-    'consent',
-    'federation',
-    'keys',
-    'media',
-    'metrics',
-    'openid',
-    'replication',
-    'static',
-    'webclient',
+    "client",
+    "consent",
+    "federation",
+    "keys",
+    "media",
+    "metrics",
+    "openid",
+    "replication",
+    "static",
+    "webclient",
 )

@@ -735,11 +727,9 @@ def _check_resource_config(listeners):

     for resource in resource_names:
         if resource not in KNOWN_RESOURCES:
-            raise ConfigError(
-                "Unknown listener resource '%s'" % (resource, )
-            )
+            raise ConfigError("Unknown listener resource '%s'" % (resource,))
         if resource == "consent":
             try:
-                check_requirements('resources.consent')
+                check_requirements("resources.consent")
             except DependencyException as e:
                 raise ConfigError(e.message)
@@ -58,6 +58,7 @@ class ServerNoticesConfig(Config):
         The name to use for the server notices room.
         None if server notices are not enabled.
     """
+
     def __init__(self):
         super(ServerNoticesConfig, self).__init__()
         self.server_notices_mxid = None

@@ -70,18 +71,12 @@ class ServerNoticesConfig(Config):
         if c is None:
             return

-        mxid_localpart = c['system_mxid_localpart']
-        self.server_notices_mxid = UserID(
-            mxid_localpart, self.server_name,
-        ).to_string()
-        self.server_notices_mxid_display_name = c.get(
-            'system_mxid_display_name', None,
-        )
-        self.server_notices_mxid_avatar_url = c.get(
-            'system_mxid_avatar_url', None,
-        )
+        mxid_localpart = c["system_mxid_localpart"]
+        self.server_notices_mxid = UserID(mxid_localpart, self.server_name).to_string()
+        self.server_notices_mxid_display_name = c.get("system_mxid_display_name", None)
+        self.server_notices_mxid_avatar_url = c.get("system_mxid_avatar_url", None)
         # todo: i18n
-        self.server_notices_room_name = c.get('room_name', "Server Notices")
+        self.server_notices_room_name = c.get("room_name", "Server Notices")

     def default_config(self, **kwargs):
         return DEFAULT_CONFIG
@@ -42,11 +42,11 @@ class TlsConfig(Config):
         self.acme_enabled = acme_config.get("enabled", False)

         # hyperlink complains on py2 if this is not a Unicode
-        self.acme_url = six.text_type(acme_config.get(
-            "url", u"https://acme-v01.api.letsencrypt.org/directory"
-        ))
+        self.acme_url = six.text_type(
+            acme_config.get("url", "https://acme-v01.api.letsencrypt.org/directory")
+        )
         self.acme_port = acme_config.get("port", 80)
-        self.acme_bind_addresses = acme_config.get("bind_addresses", ['::', '0.0.0.0'])
+        self.acme_bind_addresses = acme_config.get("bind_addresses", ["::", "0.0.0.0"])
         self.acme_reprovision_threshold = acme_config.get("reprovision_threshold", 30)
         self.acme_domain = acme_config.get("domain", config.get("server_name"))

@@ -74,12 +74,12 @@ class TlsConfig(Config):

         # Whether to verify certificates on outbound federation traffic
         self.federation_verify_certificates = config.get(
-            "federation_verify_certificates", True,
+            "federation_verify_certificates", True
         )

         # Whitelist of domains to not verify certificates for
         fed_whitelist_entries = config.get(
-            "federation_certificate_verification_whitelist", [],
+            "federation_certificate_verification_whitelist", []
         )

         # Support globs (*) in whitelist values

@@ -90,9 +90,7 @@ class TlsConfig(Config):
             self.federation_certificate_verification_whitelist.append(entry_regex)

         # List of custom certificate authorities for federation traffic validation
-        custom_ca_list = config.get(
-            "federation_custom_ca_list", None,
-        )
+        custom_ca_list = config.get("federation_custom_ca_list", None)

         # Read in and parse custom CA certificates
         self.federation_ca_trust_root = None

@@ -101,8 +99,10 @@ class TlsConfig(Config):
                 # A trustroot cannot be generated without any CA certificates.
                 # Raise an error if this option has been specified without any
                 # corresponding certificates.
-                raise ConfigError("federation_custom_ca_list specified without "
-                                  "any certificate files")
+                raise ConfigError(
+                    "federation_custom_ca_list specified without "
+                    "any certificate files"
+                )

             certs = []
             for ca_file in custom_ca_list:

@@ -114,8 +114,9 @@ class TlsConfig(Config):
                     cert_base = Certificate.loadPEM(content)
                     certs.append(cert_base)
                 except Exception as e:
-                    raise ConfigError("Error parsing custom CA certificate file %s: %s"
-                                      % (ca_file, e))
+                    raise ConfigError(
+                        "Error parsing custom CA certificate file %s: %s" % (ca_file, e)
+                    )

             self.federation_ca_trust_root = trustRootFromCertificates(certs)

@@ -146,17 +147,21 @@ class TlsConfig(Config):
             return None

         try:
-            with open(self.tls_certificate_file, 'rb') as f:
+            with open(self.tls_certificate_file, "rb") as f:
                 cert_pem = f.read()
         except Exception as e:
-            raise ConfigError("Failed to read existing certificate file %s: %s"
-                              % (self.tls_certificate_file, e))
+            raise ConfigError(
+                "Failed to read existing certificate file %s: %s"
+                % (self.tls_certificate_file, e)
+            )

         try:
             tls_certificate = crypto.load_certificate(crypto.FILETYPE_PEM, cert_pem)
         except Exception as e:
-            raise ConfigError("Failed to parse existing certificate file %s: %s"
-                              % (self.tls_certificate_file, e))
+            raise ConfigError(
+                "Failed to parse existing certificate file %s: %s"
+                % (self.tls_certificate_file, e)
+            )

         if not allow_self_signed:
             if tls_certificate.get_subject() == tls_certificate.get_issuer():

@@ -166,7 +171,7 @@ class TlsConfig(Config):

             # YYYYMMDDhhmmssZ -- in UTC
             expires_on = datetime.strptime(
-                tls_certificate.get_notAfter().decode('ascii'), "%Y%m%d%H%M%SZ"
+                tls_certificate.get_notAfter().decode("ascii"), "%Y%m%d%H%M%SZ"
             )
             now = datetime.utcnow()
             days_remaining = (expires_on - now).days

@@ -191,7 +196,8 @@ class TlsConfig(Config):
             except Exception as e:
                 logger.info(
                     "Unable to read TLS certificate (%s). Ignoring as no "
-                    "tls listeners enabled.", e,
+                    "tls listeners enabled.",
+                    e,
                 )

         self.tls_fingerprints = list(self._original_tls_fingerprints)

@@ -205,7 +211,7 @@ class TlsConfig(Config):
             sha256_fingerprint = encode_base64(sha256(x509_certificate_bytes).digest())
             sha256_fingerprints = set(f["sha256"] for f in self.tls_fingerprints)
             if sha256_fingerprint not in sha256_fingerprints:
-                self.tls_fingerprints.append({u"sha256": sha256_fingerprint})
+                self.tls_fingerprints.append({"sha256": sha256_fingerprint})

     def default_config(self, config_dir_path, server_name, **kwargs):
         base_key_name = os.path.join(config_dir_path, server_name)

@@ -215,8 +221,8 @@ class TlsConfig(Config):

         # this is to avoid the max line length. Sorrynotsorry
         proxypassline = (
-            'ProxyPass /.well-known/acme-challenge '
-            'http://localhost:8009/.well-known/acme-challenge'
+            "ProxyPass /.well-known/acme-challenge "
+            "http://localhost:8009/.well-known/acme-challenge"
         )

         return (
@@ -26,11 +26,11 @@ class UserDirectoryConfig(Config):
         self.user_directory_search_all_users = False
         user_directory_config = config.get("user_directory", None)
         if user_directory_config:
-            self.user_directory_search_enabled = (
-                user_directory_config.get("enabled", True)
+            self.user_directory_search_enabled = user_directory_config.get(
+                "enabled", True
             )
-            self.user_directory_search_all_users = (
-                user_directory_config.get("search_all_users", False)
+            self.user_directory_search_all_users = user_directory_config.get(
+                "search_all_users", False
             )

     def default_config(self, config_dir_path, server_name, **kwargs):
@@ -16,14 +16,13 @@ from ._base import Config


 class VoipConfig(Config):
-
     def read_config(self, config):
         self.turn_uris = config.get("turn_uris", [])
         self.turn_shared_secret = config.get("turn_shared_secret")
         self.turn_username = config.get("turn_username")
         self.turn_password = config.get("turn_password")
         self.turn_user_lifetime = self.parse_duration(
-            config.get("turn_user_lifetime", "1h"),
+            config.get("turn_user_lifetime", "1h")
         )
         self.turn_allow_guests = config.get("turn_allow_guests", True)
@@ -52,12 +52,14 @@ class WorkerConfig(Config):
         # argument.
         manhole = config.get("worker_manhole")
         if manhole:
-            self.worker_listeners.append({
-                "port": manhole,
-                "bind_addresses": ["127.0.0.1"],
-                "type": "manhole",
-                "tls": False,
-            })
+            self.worker_listeners.append(
+                {
+                    "port": manhole,
+                    "bind_addresses": ["127.0.0.1"],
+                    "type": "manhole",
+                    "tls": False,
+                }
+            )

         if self.worker_listeners:
             for listener in self.worker_listeners:

@@ -67,7 +69,7 @@ class WorkerConfig(Config):
                 if bind_address:
                     bind_addresses.append(bind_address)
                 elif not bind_addresses:
-                    bind_addresses.append('')
+                    bind_addresses.append("")

     def read_arguments(self, args):
         # We support a bunch of command line arguments that override options in
@@ -46,9 +46,7 @@ def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
     if name not in hashes:
         raise SynapseError(
             400,
-            "Algorithm %s not in hashes %s" % (
-                name, list(hashes),
-            ),
+            "Algorithm %s not in hashes %s" % (name, list(hashes)),
             Codes.UNAUTHORIZED,
         )
     message_hash_base64 = hashes[name]

@@ -56,9 +54,7 @@ def check_event_content_hash(event, hash_algorithm=hashlib.sha256):
         message_hash_bytes = decode_base64(message_hash_base64)
     except Exception:
         raise SynapseError(
-            400,
-            "Invalid base64: %s" % (message_hash_base64,),
-            Codes.UNAUTHORIZED,
+            400, "Invalid base64: %s" % (message_hash_base64,), Codes.UNAUTHORIZED
         )
     return message_hash_bytes == expected_hash

@@ -135,8 +131,9 @@ def compute_event_signature(event_dict, signature_name, signing_key):
     return redact_json["signatures"]


-def add_hashes_and_signatures(event_dict, signature_name, signing_key,
-                              hash_algorithm=hashlib.sha256):
+def add_hashes_and_signatures(
+    event_dict, signature_name, signing_key, hash_algorithm=hashlib.sha256
+):
     """Add content hash and sign the event

     Args:

@@ -153,7 +150,5 @@ def add_hashes_and_signatures(event_dict, signature_name, signing_key,
     event_dict.setdefault("hashes", {})[name] = encode_base64(digest)

     event_dict["signatures"] = compute_event_signature(
-        event_dict,
-        signature_name=signature_name,
-        signing_key=signing_key,
+        event_dict, signature_name=signature_name, signing_key=signing_key
     )
@@ -505,7 +505,7 @@ class BaseV2KeyFetcher(object):
         Returns:
             Deferred[dict[str, FetchKeyResult]]: map from key_id to result object
         """
-        ts_valid_until_ms = response_json[u"valid_until_ts"]
+        ts_valid_until_ms = response_json["valid_until_ts"]

         # start by extracting the keys from the response, since they may be required
         # to validate the signature on the response.

@@ -614,10 +614,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):

         results = yield logcontext.make_deferred_yieldable(
             defer.gatherResults(
-                [
-                    run_in_background(get_key, server)
-                    for server in self.key_servers
-                ],
+                [run_in_background(get_key, server) for server in self.key_servers],
                 consumeErrors=True,
             ).addErrback(unwrapFirstError)
         )

@@ -630,9 +627,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
        defer.returnValue(union_of_keys)

     @defer.inlineCallbacks
-    def get_server_verify_key_v2_indirect(
-        self, keys_to_fetch, key_server
-    ):
+    def get_server_verify_key_v2_indirect(self, keys_to_fetch, key_server):
         """
         Args:
             keys_to_fetch (dict[str, dict[str, int]]):

@@ -661,9 +656,9 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
                 destination=perspective_name,
                 path="/_matrix/key/v2/query",
                 data={
-                    u"server_keys": {
+                    "server_keys": {
                         server_name: {
-                            key_id: {u"minimum_valid_until_ts": min_valid_ts}
+                            key_id: {"minimum_valid_until_ts": min_valid_ts}
                             for key_id, min_valid_ts in server_keys.items()
                         }
                         for server_name, server_keys in keys_to_fetch.items()

@@ -690,10 +685,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
             )

         try:
-            self._validate_perspectives_response(
-                key_server,
-                response,
-            )
+            self._validate_perspectives_response(key_server, response)

             processed_response = yield self.process_v2_response(
                 perspective_name, response, time_added_ms=time_now_ms

@@ -720,9 +712,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):

         defer.returnValue(keys)

-    def _validate_perspectives_response(
-        self, key_server, response,
-    ):
+    def _validate_perspectives_response(self, key_server, response):
         """Optionally check the signature on the result of a /key/query request

         Args:

@@ -739,13 +729,13 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
             return

         if (
-            u"signatures" not in response
-            or perspective_name not in response[u"signatures"]
+            "signatures" not in response
+            or perspective_name not in response["signatures"]
         ):
             raise KeyLookupError("Response not signed by the notary server")

         verified = False
-        for key_id in response[u"signatures"][perspective_name]:
+        for key_id in response["signatures"][perspective_name]:
             if key_id in perspective_keys:
                 verify_signed_json(response, perspective_name, perspective_keys[key_id])
                 verified = True

@@ -754,7 +744,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
             raise KeyLookupError(
                 "Response not signed with a known key: signed with: %r, known keys: %r"
                 % (
-                    list(response[u"signatures"][perspective_name].keys()),
+                    list(response["signatures"][perspective_name].keys()),
                     list(perspective_keys.keys()),
                 )
             )

@@ -826,7 +816,6 @@ class ServerKeyFetcher(BaseV2KeyFetcher):
             path="/_matrix/key/v2/server/"
             + urllib.parse.quote(requested_key_id),
             ignore_backoff=True,
-
             # we only give the remote server 10s to respond. It should be an
             # easy request to handle, so if it doesn't reply within 10s, it's
             # probably not going to.
@@ -85,17 +85,14 @@ def check(room_version, event, auth_events, do_sig_check=True, do_size_check=Tru
         room_id_domain = get_domain_from_id(event.room_id)
         if room_id_domain != sender_domain:
             raise AuthError(
-                403,
-                "Creation event's room_id domain does not match sender's"
+                403, "Creation event's room_id domain does not match sender's"
             )

         room_version = event.content.get("room_version", "1")
         if room_version not in KNOWN_ROOM_VERSIONS:
             raise AuthError(
-                403,
-                "room appears to have unsupported version %s" % (
-                    room_version,
-                ))
+                403, "room appears to have unsupported version %s" % (room_version,)
+            )
         # FIXME
         logger.debug("Allowing! %s", event)
         return

@@ -103,46 +100,30 @@ def check(room_version, event, auth_events, do_sig_check=True, do_size_check=Tru
     creation_event = auth_events.get((EventTypes.Create, ""), None)

     if not creation_event:
-        raise AuthError(
-            403,
-            "No create event in auth events",
-        )
+        raise AuthError(403, "No create event in auth events")

     creating_domain = get_domain_from_id(event.room_id)
     originating_domain = get_domain_from_id(event.sender)
     if creating_domain != originating_domain:
         if not _can_federate(event, auth_events):
-            raise AuthError(
-                403,
-                "This room has been marked as unfederatable."
-            )
+            raise AuthError(403, "This room has been marked as unfederatable.")

     # FIXME: Temp hack
     if event.type == EventTypes.Aliases:
         if not event.is_state():
-            raise AuthError(
-                403,
-                "Alias event must be a state event",
-            )
+            raise AuthError(403, "Alias event must be a state event")
         if not event.state_key:
-            raise AuthError(
-                403,
-                "Alias event must have non-empty state_key"
-            )
+            raise AuthError(403, "Alias event must have non-empty state_key")
         sender_domain = get_domain_from_id(event.sender)
         if event.state_key != sender_domain:
             raise AuthError(
-                403,
-                "Alias event's state_key does not match sender's domain"
+                403, "Alias event's state_key does not match sender's domain"
             )
         logger.debug("Allowing! %s", event)
         return

     if logger.isEnabledFor(logging.DEBUG):
-        logger.debug(
-            "Auth events: %s",
-            [a.event_id for a in auth_events.values()]
-        )
+        logger.debug("Auth events: %s", [a.event_id for a in auth_events.values()])

     if event.type == EventTypes.Member:
         _is_membership_change_allowed(event, auth_events)

@@ -159,9 +140,7 @@ def check(room_version, event, auth_events, do_sig_check=True, do_size_check=Tru
             invite_level = _get_named_level(auth_events, "invite", 0)

             if user_level < invite_level:
-                raise AuthError(
-                    403, "You don't have permission to invite users",
-                )
+                raise AuthError(403, "You don't have permission to invite users")
         else:
             logger.debug("Allowing! %s", event)
             return

@@ -207,7 +186,7 @@ def _is_membership_change_allowed(event, auth_events):
     # Check if this is the room creator joining:
     if len(event.prev_event_ids()) == 1 and Membership.JOIN == membership:
         # Get room creation event:
-        key = (EventTypes.Create, "", )
+        key = (EventTypes.Create, "")
         create = auth_events.get(key)
         if create and event.prev_event_ids()[0] == create.event_id:
             if create.content["creator"] == event.state_key:

@@ -219,38 +198,31 @@ def _is_membership_change_allowed(event, auth_events):
         target_domain = get_domain_from_id(target_user_id)
         if creating_domain != target_domain:
             if not _can_federate(event, auth_events):
-                raise AuthError(
-                    403,
-                    "This room has been marked as unfederatable."
-                )
+                raise AuthError(403, "This room has been marked as unfederatable.")

     # get info about the caller
-    key = (EventTypes.Member, event.user_id, )
+    key = (EventTypes.Member, event.user_id)
     caller = auth_events.get(key)

     caller_in_room = caller and caller.membership == Membership.JOIN
     caller_invited = caller and caller.membership == Membership.INVITE

     # get info about the target
-    key = (EventTypes.Member, target_user_id, )
+    key = (EventTypes.Member, target_user_id)
     target = auth_events.get(key)

     target_in_room = target and target.membership == Membership.JOIN
     target_banned = target and target.membership == Membership.BAN

-    key = (EventTypes.JoinRules, "", )
+    key = (EventTypes.JoinRules, "")
     join_rule_event = auth_events.get(key)
     if join_rule_event:
-        join_rule = join_rule_event.content.get(
-            "join_rule", JoinRules.INVITE
-        )
+        join_rule = join_rule_event.content.get("join_rule", JoinRules.INVITE)
     else:
         join_rule = JoinRules.INVITE

     user_level = get_user_power_level(event.user_id, auth_events)
-    target_level = get_user_power_level(
-        target_user_id, auth_events
-    )
+    target_level = get_user_power_level(target_user_id, auth_events)

     # FIXME (erikj): What should we do here as the default?
     ban_level = _get_named_level(auth_events, "ban", 50)

@@ -266,29 +238,26 @@ def _is_membership_change_allowed(event, auth_events):
             "join_rule": join_rule,
             "target_user_id": target_user_id,
             "event.user_id": event.user_id,
-        }
+        },
     )

     if Membership.INVITE == membership and "third_party_invite" in event.content:
         if not _verify_third_party_invite(event, auth_events):
             raise AuthError(403, "You are not invited to this room.")
         if target_banned:
-            raise AuthError(
-                403, "%s is banned from the room" % (target_user_id,)
-            )
+            raise AuthError(403, "%s is banned from the room" % (target_user_id,))
         return

     if Membership.JOIN != membership:
-        if (caller_invited
-                and Membership.LEAVE == membership
-                and target_user_id == event.user_id):
+        if (
+            caller_invited
+            and Membership.LEAVE == membership
+            and target_user_id == event.user_id
+        ):
             return

         if not caller_in_room:  # caller isn't joined
-            raise AuthError(
-                403,
-                "%s not in room %s." % (event.user_id, event.room_id,)
-            )
+            raise AuthError(403, "%s not in room %s." % (event.user_id, event.room_id))

     if Membership.INVITE == membership:
         # TODO (erikj): We should probably handle this more intelligently

@@ -296,19 +265,14 @@ def _is_membership_change_allowed(event, auth_events):

         # Invites are valid iff caller is in the room and target isn't.
         if target_banned:
-            raise AuthError(
-                403, "%s is banned from the room" % (target_user_id,)
-            )
+            raise AuthError(403, "%s is banned from the room" % (target_user_id,))
         elif target_in_room:  # the target is already in the room.
-            raise AuthError(403, "%s is already in the room." %
-                            target_user_id)
+            raise AuthError(403, "%s is already in the room." % target_user_id)
         else:
             invite_level = _get_named_level(auth_events, "invite", 0)

             if user_level < invite_level:
-                raise AuthError(
-                    403, "You don't have permission to invite users",
-                )
+                raise AuthError(403, "You don't have permission to invite users")
     elif Membership.JOIN == membership:
         # Joins are valid iff caller == target and they were:
         #     invited: They are accepting the invitation

@@ -329,16 +293,12 @@ def _is_membership_change_allowed(event, auth_events):
     elif Membership.LEAVE == membership:
         # TODO (erikj): Implement kicks.
         if target_banned and user_level < ban_level:
-            raise AuthError(
-                403, "You cannot unban user %s." % (target_user_id,)
-            )
+            raise AuthError(403, "You cannot unban user %s." % (target_user_id,))
         elif target_user_id != event.user_id:
             kick_level = _get_named_level(auth_events, "kick", 50)

             if user_level < kick_level or user_level <= target_level:
-                raise AuthError(
-                    403, "You cannot kick user %s." % target_user_id
-                )
+                raise AuthError(403, "You cannot kick user %s." % target_user_id)
     elif Membership.BAN == membership:
         if user_level < ban_level or user_level <= target_level:
             raise AuthError(403, "You don't have permission to ban")

@@ -347,21 +307,17 @@ def _is_membership_change_allowed(event, auth_events):


 def _check_event_sender_in_room(event, auth_events):
-    key = (EventTypes.Member, event.user_id, )
+    key = (EventTypes.Member, event.user_id)
     member_event = auth_events.get(key)

-    return _check_joined_room(
-        member_event,
-        event.user_id,
-        event.room_id
-    )
+    return _check_joined_room(member_event, event.user_id, event.room_id)


 def _check_joined_room(member, user_id, room_id):
     if not member or member.membership != Membership.JOIN:
-        raise AuthError(403, "User %s not in room %s (%s)" % (
-            user_id, room_id, repr(member)
-        ))
+        raise AuthError(
+            403, "User %s not in room %s (%s)" % (user_id, room_id, repr(member))
+        )


 def get_send_level(etype, state_key, power_levels_event):

@@ -402,26 +358,21 @@ def get_send_level(etype, state_key, power_levels_event):
 def _can_send_event(event, auth_events):
     power_levels_event = _get_power_level_event(auth_events)

-    send_level = get_send_level(
-        event.type, event.get("state_key"), power_levels_event,
-    )
+    send_level = get_send_level(event.type, event.get("state_key"), power_levels_event)
     user_level = get_user_power_level(event.user_id, auth_events)

     if user_level < send_level:
         raise AuthError(
             403,
-            "You don't have permission to post that to the room. " +
-            "user_level (%d) < send_level (%d)" % (user_level, send_level)
+            "You don't have permission to post that to the room. "
+            + "user_level (%d) < send_level (%d)" % (user_level, send_level),
         )

     # Check state_key
     if hasattr(event, "state_key"):
         if event.state_key.startswith("@"):
             if event.state_key != event.user_id:
-                raise AuthError(
-                    403,
-                    "You are not allowed to set others state"
-                )
+                raise AuthError(403, "You are not allowed to set others state")

     return True

@@ -459,10 +410,7 @@ def check_redaction(room_version, event, auth_events):
             event.internal_metadata.recheck_redaction = True
             return True

-    raise AuthError(
-        403,
-        "You don't have permission to redact events"
-    )
+    raise AuthError(403, "You don't have permission to redact events")


 def _check_power_levels(event, auth_events):

@@ -479,7 +427,7 @@ def _check_power_levels(event, auth_events):
     except Exception:
|
||||
raise SynapseError(400, "Not a valid power level: %s" % (v,))
|
||||
|
||||
key = (event.type, event.state_key, )
|
||||
key = (event.type, event.state_key)
|
||||
current_state = auth_events.get(key)
|
||||
|
||||
if not current_state:
|
||||
|
@ -500,16 +448,12 @@ def _check_power_levels(event, auth_events):
|
|||
|
||||
old_list = current_state.content.get("users", {})
|
||||
for user in set(list(old_list) + list(user_list)):
|
||||
levels_to_check.append(
|
||||
(user, "users")
|
||||
)
|
||||
levels_to_check.append((user, "users"))
|
||||
|
||||
old_list = current_state.content.get("events", {})
|
||||
new_list = event.content.get("events", {})
|
||||
for ev_id in set(list(old_list) + list(new_list)):
|
||||
levels_to_check.append(
|
||||
(ev_id, "events")
|
||||
)
|
||||
levels_to_check.append((ev_id, "events"))
|
||||
|
||||
old_state = current_state.content
|
||||
new_state = event.content
|
||||
|
@ -540,7 +484,7 @@ def _check_power_levels(event, auth_events):
|
|||
raise AuthError(
|
||||
403,
|
||||
"You don't have permission to remove ops level equal "
|
||||
"to your own"
|
||||
"to your own",
|
||||
)
|
||||
|
||||
# Check if the old and new levels are greater than the user level
|
||||
|
@ -550,8 +494,7 @@ def _check_power_levels(event, auth_events):
|
|||
if old_level_too_big or new_level_too_big:
|
||||
raise AuthError(
|
||||
403,
|
||||
"You don't have permission to add ops level greater "
|
||||
"than your own"
|
||||
"You don't have permission to add ops level greater " "than your own",
|
||||
)
|
||||
|
||||
|
||||
|
@ -587,10 +530,9 @@ def get_user_power_level(user_id, auth_events):
|
|||
|
||||
# some things which call this don't pass the create event: hack around
|
||||
# that.
|
||||
key = (EventTypes.Create, "", )
|
||||
key = (EventTypes.Create, "")
|
||||
create_event = auth_events.get(key)
|
||||
if (create_event is not None and
|
||||
create_event.content["creator"] == user_id):
|
||||
if create_event is not None and create_event.content["creator"] == user_id:
|
||||
return 100
|
||||
else:
|
||||
return 0
|
||||
|
@ -636,9 +578,7 @@ def _verify_third_party_invite(event, auth_events):
|
|||
|
||||
token = signed["token"]
|
||||
|
||||
invite_event = auth_events.get(
|
||||
(EventTypes.ThirdPartyInvite, token,)
|
||||
)
|
||||
invite_event = auth_events.get((EventTypes.ThirdPartyInvite, token))
|
||||
if not invite_event:
|
||||
return False
|
||||
|
||||
|
@ -661,8 +601,7 @@ def _verify_third_party_invite(event, auth_events):
|
|||
if not key_name.startswith("ed25519:"):
|
||||
continue
|
||||
verify_key = decode_verify_key_bytes(
|
||||
key_name,
|
||||
decode_base64(public_key)
|
||||
key_name, decode_base64(public_key)
|
||||
)
|
||||
verify_signed_json(signed, server, verify_key)
|
||||
|
||||
|
@ -671,7 +610,7 @@ def _verify_third_party_invite(event, auth_events):
|
|||
# The caller is responsible for checking that the signing
|
||||
# server has not revoked that public key.
|
||||
return True
|
||||
except (KeyError, SignatureVerifyException,):
|
||||
except (KeyError, SignatureVerifyException):
|
||||
continue
|
||||
return False
|
||||
|
||||
|
@ -679,9 +618,7 @@ def _verify_third_party_invite(event, auth_events):
|
|||
def get_public_keys(invite_event):
|
||||
public_keys = []
|
||||
if "public_key" in invite_event.content:
|
||||
o = {
|
||||
"public_key": invite_event.content["public_key"],
|
||||
}
|
||||
o = {"public_key": invite_event.content["public_key"]}
|
||||
if "key_validity_url" in invite_event.content:
|
||||
o["key_validity_url"] = invite_event.content["key_validity_url"]
|
||||
public_keys.append(o)
|
||||
|
@ -702,22 +639,22 @@ def auth_types_for_event(event):
|
|||
|
||||
auth_types = []
|
||||
|
||||
auth_types.append((EventTypes.PowerLevels, "", ))
|
||||
auth_types.append((EventTypes.Member, event.sender, ))
|
||||
auth_types.append((EventTypes.Create, "", ))
|
||||
auth_types.append((EventTypes.PowerLevels, ""))
|
||||
auth_types.append((EventTypes.Member, event.sender))
|
||||
auth_types.append((EventTypes.Create, ""))
|
||||
|
||||
if event.type == EventTypes.Member:
|
||||
membership = event.content["membership"]
|
||||
if membership in [Membership.JOIN, Membership.INVITE]:
|
||||
auth_types.append((EventTypes.JoinRules, "", ))
|
||||
auth_types.append((EventTypes.JoinRules, ""))
|
||||
|
||||
auth_types.append((EventTypes.Member, event.state_key, ))
|
||||
auth_types.append((EventTypes.Member, event.state_key))
|
||||
|
||||
if membership == Membership.INVITE:
|
||||
if "third_party_invite" in event.content:
|
||||
key = (
|
||||
EventTypes.ThirdPartyInvite,
|
||||
event.content["third_party_invite"]["signed"]["token"]
|
||||
event.content["third_party_invite"]["signed"]["token"],
|
||||
)
|
||||
auth_types.append(key)
|
||||
|
||||
|
|
|
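The hunks above (synapse/event_auth.py) are all instances of two Black rules: a call or literal that fits within Black's 88-column line length is collapsed onto a single line, and space-padded tuple literals lose their dangling ", )". A minimal illustrative sketch of the pattern, not itself part of the commit:

    # before: hand-wrapped call and padded tuple
    key = (EventTypes.Create, "", )
    target_level = get_user_power_level(
        target_user_id, auth_events
    )
    # after Black: collapsed, since both fit within 88 columns
    key = (EventTypes.Create, "")
    target_level = get_user_power_level(target_user_id, auth_events)

diff --git a/synapse/events/__init__.py b/synapse/events/__init__.py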
@@ -127,25 +127,25 @@ def _event_dict_property(key):
         except KeyError:
             raise AttributeError(key)

-    return property(
-        getter,
-        setter,
-        delete,
-    )
+    return property(getter, setter, delete)


 class EventBase(object):
-    def __init__(self, event_dict, signatures={}, unsigned={},
-                 internal_metadata_dict={}, rejected_reason=None):
+    def __init__(
+        self,
+        event_dict,
+        signatures={},
+        unsigned={},
+        internal_metadata_dict={},
+        rejected_reason=None,
+    ):
         self.signatures = signatures
         self.unsigned = unsigned
         self.rejected_reason = rejected_reason

         self._event_dict = event_dict

-        self.internal_metadata = _EventInternalMetadata(
-            internal_metadata_dict
-        )
+        self.internal_metadata = _EventInternalMetadata(internal_metadata_dict)

     auth_events = _event_dict_property("auth_events")
     depth = _event_dict_property("depth")

@@ -168,10 +168,7 @@ class EventBase(object):

     def get_dict(self):
         d = dict(self._event_dict)
-        d.update({
-            "signatures": self.signatures,
-            "unsigned": dict(self.unsigned),
-        })
+        d.update({"signatures": self.signatures, "unsigned": dict(self.unsigned)})

         return d


@@ -358,6 +355,7 @@ class FrozenEventV2(EventBase):

 class FrozenEventV3(FrozenEventV2):
     """FrozenEventV3, which differs from FrozenEventV2 only in the event_id format"""
+
     format_version = EventFormatVersions.V3  # All events of this type are V3

     @property

@@ -414,6 +412,4 @@ def event_type_from_format_version(format_version):
     elif format_version == EventFormatVersions.V3:
         return FrozenEventV3
     else:
-        raise Exception(
-            "No event format %r" % (format_version,)
-        )
+        raise Exception("No event format %r" % (format_version,))
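diff --git a/synapse/events/builder.py b/synapse/events/builder.py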
@@ -78,7 +78,9 @@ class EventBuilder(object):
     _redacts = attr.ib(default=None)
     _origin_server_ts = attr.ib(default=None)

-    internal_metadata = attr.ib(default=attr.Factory(lambda: _EventInternalMetadata({})))
+    internal_metadata = attr.ib(
+        default=attr.Factory(lambda: _EventInternalMetadata({}))
+    )

     @property
     def state_key(self):

@@ -102,11 +104,9 @@ class EventBuilder(object):
         """

         state_ids = yield self._state.get_current_state_ids(
-            self.room_id, prev_event_ids,
-        )
-        auth_ids = yield self._auth.compute_auth_events(
-            self, state_ids,
+            self.room_id, prev_event_ids
         )
+        auth_ids = yield self._auth.compute_auth_events(self, state_ids)

         if self.format_version == EventFormatVersions.V1:
             auth_events = yield self._store.add_event_hashes(auth_ids)

@@ -115,9 +115,7 @@ class EventBuilder(object):
             auth_events = auth_ids
             prev_events = prev_event_ids

-        old_depth = yield self._store.get_max_depth_of(
-            prev_event_ids,
-        )
+        old_depth = yield self._store.get_max_depth_of(prev_event_ids)
         depth = old_depth + 1

         # we cap depth of generated events, to ensure that they are not

@@ -217,9 +215,14 @@ class EventBuilderFactory(object):
         )


-def create_local_event_from_event_dict(clock, hostname, signing_key,
-                                       format_version, event_dict,
-                                       internal_metadata_dict=None):
+def create_local_event_from_event_dict(
+    clock,
+    hostname,
+    signing_key,
+    format_version,
+    event_dict,
+    internal_metadata_dict=None,
+):
     """Takes a fully formed event dict, ensuring that fields like `origin`
     and `origin_server_ts` have correct values for a locally produced event,
     then signs and hashes it.

@@ -237,9 +240,7 @@ def create_local_event_from_event_dict(clock, hostname, signing_key,
     """

     if format_version not in KNOWN_EVENT_FORMAT_VERSIONS:
-        raise Exception(
-            "No event format defined for version %r" % (format_version,)
-        )
+        raise Exception("No event format defined for version %r" % (format_version,))

     if internal_metadata_dict is None:
         internal_metadata_dict = {}

@@ -258,13 +259,9 @@ def create_local_event_from_event_dict(clock, hostname, signing_key,

     event_dict.setdefault("signatures", {})

-    add_hashes_and_signatures(
-        event_dict,
-        hostname,
-        signing_key,
-    )
+    add_hashes_and_signatures(event_dict, hostname, signing_key)
     return event_type_from_format_version(format_version)(
-        event_dict, internal_metadata_dict=internal_metadata_dict,
+        event_dict, internal_metadata_dict=internal_metadata_dict
     )
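The builder.py hunks show the opposite transformation: when a definition such as create_local_event_from_event_dict() cannot fit on one line, Black explodes it to one parameter per line with a trailing "magic" comma. A rough sketch of the rule, illustrative only and not taken from the commit:

    # before: manually wrapped, aligned continuation lines
    def create_local_event_from_event_dict(clock, hostname, signing_key,
                                           format_version, event_dict,
                                           internal_metadata_dict=None):
        ...

    # after Black: one parameter per line, trailing comma on the last
    def create_local_event_from_event_dict(
        clock,
        hostname,
        signing_key,
        format_version,
        event_dict,
        internal_metadata_dict=None,
    ):
        ...

diff --git a/synapse/events/snapshot.py b/synapse/events/snapshot.py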
@@ -88,8 +88,9 @@ class EventContext(object):
         self.app_service = None

     @staticmethod
-    def with_state(state_group, current_state_ids, prev_state_ids,
-                   prev_group=None, delta_ids=None):
+    def with_state(
+        state_group, current_state_ids, prev_state_ids, prev_group=None, delta_ids=None
+    ):
         context = EventContext()

         # The current state including the current event

@@ -132,17 +133,19 @@ class EventContext(object):
         else:
             prev_state_id = None

-        defer.returnValue({
-            "prev_state_id": prev_state_id,
-            "event_type": event.type,
-            "event_state_key": event.state_key if event.is_state() else None,
-            "state_group": self.state_group,
-            "rejected": self.rejected,
-            "prev_group": self.prev_group,
-            "delta_ids": _encode_state_dict(self.delta_ids),
-            "prev_state_events": self.prev_state_events,
-            "app_service_id": self.app_service.id if self.app_service else None
-        })
+        defer.returnValue(
+            {
+                "prev_state_id": prev_state_id,
+                "event_type": event.type,
+                "event_state_key": event.state_key if event.is_state() else None,
+                "state_group": self.state_group,
+                "rejected": self.rejected,
+                "prev_group": self.prev_group,
+                "delta_ids": _encode_state_dict(self.delta_ids),
+                "prev_state_events": self.prev_state_events,
+                "app_service_id": self.app_service.id if self.app_service else None,
+            }
+        )

     @staticmethod
     def deserialize(store, input):

@@ -194,7 +197,7 @@ class EventContext(object):

         if not self._fetching_state_deferred:
             self._fetching_state_deferred = run_in_background(
-                self._fill_out_state, store,
+                self._fill_out_state, store
             )

         yield make_deferred_yieldable(self._fetching_state_deferred)

@@ -214,7 +217,7 @@ class EventContext(object):

         if not self._fetching_state_deferred:
             self._fetching_state_deferred = run_in_background(
-                self._fill_out_state, store,
+                self._fill_out_state, store
             )

         yield make_deferred_yieldable(self._fetching_state_deferred)

@@ -240,9 +243,7 @@ class EventContext(object):
         if self.state_group is None:
             return

-        self._current_state_ids = yield store.get_state_ids_for_group(
-            self.state_group,
-        )
+        self._current_state_ids = yield store.get_state_ids_for_group(self.state_group)
         if self._prev_state_id and self._event_state_key is not None:
             self._prev_state_ids = dict(self._current_state_ids)


@@ -252,8 +253,9 @@ class EventContext(object):
             self._prev_state_ids = self._current_state_ids

     @defer.inlineCallbacks
-    def update_state(self, state_group, prev_state_ids, current_state_ids,
-                     prev_group, delta_ids):
+    def update_state(
+        self, state_group, prev_state_ids, current_state_ids, prev_group, delta_ids
+    ):
         """Replace the state in the context
         """


@@ -279,10 +281,7 @@ def _encode_state_dict(state_dict):
     if state_dict is None:
         return None

-    return [
-        (etype, state_key, v)
-        for (etype, state_key), v in iteritems(state_dict)
-    ]
+    return [(etype, state_key, v) for (etype, state_key), v in iteritems(state_dict)]


 def _decode_state_dict(input):

@@ -291,4 +290,4 @@ def _decode_state_dict(input):
     if input is None:
         return None

-    return frozendict({(etype, state_key,): v for etype, state_key, v in input})
+    return frozendict({(etype, state_key): v for etype, state_key, v in input})
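diff --git a/synapse/events/spamcheck.py b/synapse/events/spamcheck.py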
@@ -60,7 +60,9 @@ class SpamChecker(object):
         if self.spam_checker is None:
             return True

-        return self.spam_checker.user_may_invite(inviter_userid, invitee_userid, room_id)
+        return self.spam_checker.user_may_invite(
+            inviter_userid, invitee_userid, room_id
+        )

     def user_may_create_room(self, userid):
         """Checks if a given user may create a room
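diff --git a/synapse/events/third_party_rules.py b/synapse/events/third_party_rules.py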
@@ -36,8 +36,7 @@ class ThirdPartyEventRules(object):

         if module is not None:
             self.third_party_rules = module(
-                config=config,
-                http_client=hs.get_simple_http_client(),
+                config=config, http_client=hs.get_simple_http_client()
             )

     @defer.inlineCallbacks

@@ -109,6 +108,6 @@ class ThirdPartyEventRules(object):
             state_events[key] = room_state_events[event_id]

         ret = yield self.third_party_rules.check_threepid_can_be_invited(
-            medium, address, state_events,
+            medium, address, state_events
         )
         defer.returnValue(ret)
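diff --git a/synapse/events/utils.py b/synapse/events/utils.py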
@@ -31,7 +31,7 @@ from . import EventBase
 # by a match for 'stuff'.
 # TODO: This is fast, but fails to handle "foo\\.bar" which should be treated as
 # the literal fields "foo\" and "bar" but will instead be treated as "foo\\.bar"
-SPLIT_FIELD_REGEX = re.compile(r'(?<!\\)\.')
+SPLIT_FIELD_REGEX = re.compile(r"(?<!\\)\.")


 def prune_event(event):

@@ -51,6 +51,7 @@ def prune_event(event):
     pruned_event_dict = prune_event_dict(event.get_dict())

     from . import event_type_from_format_version
+
     return event_type_from_format_version(event.format_version)(
         pruned_event_dict, event.internal_metadata.get_dict()
     )

@@ -116,11 +117,7 @@ def prune_event_dict(event_dict):
     elif event_type == EventTypes.RoomHistoryVisibility:
         add_fields("history_visibility")

-    allowed_fields = {
-        k: v
-        for k, v in event_dict.items()
-        if k in allowed_keys
-    }
+    allowed_fields = {k: v for k, v in event_dict.items() if k in allowed_keys}

     allowed_fields["content"] = new_content


@@ -205,7 +202,7 @@ def only_fields(dictionary, fields):
     # for each element of the output array of arrays:
     # remove escaping so we can use the right key names.
     split_fields[:] = [
-        [f.replace(r'\.', r'.') for f in field_array] for field_array in split_fields
+        [f.replace(r"\.", r".") for f in field_array] for field_array in split_fields
     ]

     output = {}

@@ -226,7 +223,10 @@ def format_event_for_client_v1(d):
     d["user_id"] = sender

     copy_keys = (
-        "age", "redacted_because", "replaces_state", "prev_content",
+        "age",
+        "redacted_because",
+        "replaces_state",
+        "prev_content",
         "invite_room_state",
     )
     for key in copy_keys:

@@ -238,8 +238,13 @@ def format_event_for_client_v1(d):

 def format_event_for_client_v2(d):
     drop_keys = (
-        "auth_events", "prev_events", "hashes", "signatures", "depth",
-        "origin", "prev_state",
+        "auth_events",
+        "prev_events",
+        "hashes",
+        "signatures",
+        "depth",
+        "origin",
+        "prev_state",
     )
     for key in drop_keys:
         d.pop(key, None)

@@ -252,9 +257,15 @@ def format_event_for_client_v2_without_room_id(d):
     return d


-def serialize_event(e, time_now_ms, as_client_event=True,
-                    event_format=format_event_for_client_v1,
-                    token_id=None, only_event_fields=None, is_invite=False):
+def serialize_event(
+    e,
+    time_now_ms,
+    as_client_event=True,
+    event_format=format_event_for_client_v1,
+    token_id=None,
+    only_event_fields=None,
+    is_invite=False,
+):
     """Serialize event for clients

     Args:

@@ -288,8 +299,7 @@ def serialize_event(e, time_now_ms, as_client_event=True,

     if "redacted_because" in e.unsigned:
         d["unsigned"]["redacted_because"] = serialize_event(
-            e.unsigned["redacted_because"], time_now_ms,
-            event_format=event_format
+            e.unsigned["redacted_because"], time_now_ms, event_format=event_format
         )

     if token_id is not None:

@@ -308,8 +318,9 @@ def serialize_event(e, time_now_ms, as_client_event=True,
         d = event_format(d)

     if only_event_fields:
-        if (not isinstance(only_event_fields, list) or
-                not all(isinstance(f, string_types) for f in only_event_fields)):
+        if not isinstance(only_event_fields, list) or not all(
+            isinstance(f, string_types) for f in only_event_fields
+        ):
             raise TypeError("only_event_fields must be a list of strings")
         d = only_fields(d, only_event_fields)


@@ -352,11 +363,9 @@ class EventClientSerializer(object):
         # If MSC1849 is enabled then we need to look if thre are any relations
         # we need to bundle in with the event
         if self.experimental_msc1849_support_enabled and bundle_aggregations:
-            annotations = yield self.store.get_aggregation_groups_for_event(
-                event_id,
-            )
+            annotations = yield self.store.get_aggregation_groups_for_event(event_id)
             references = yield self.store.get_relations_for_event(
-                event_id, RelationTypes.REFERENCE, direction="f",
+                event_id, RelationTypes.REFERENCE, direction="f"
             )

             if annotations.chunk:

@@ -383,9 +392,7 @@ class EventClientSerializer(object):
                 serialized_event["content"].pop("m.relates_to", None)

                 r = serialized_event["unsigned"].setdefault("m.relations", {})
-                r[RelationTypes.REPLACE] = {
-                    "event_id": edit.event_id,
-                }
+                r[RelationTypes.REPLACE] = {"event_id": edit.event_id}

         defer.returnValue(serialized_event)


@@ -401,6 +408,5 @@ class EventClientSerializer(object):
             Deferred[list[dict]]: The list of serialized events
         """
         return yieldable_gather_results(
-            self.serialize_event, events,
-            time_now=time_now, **kwargs
+            self.serialize_event, events, time_now=time_now, **kwargs
         )
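Note the SPLIT_FIELD_REGEX and only_fields() hunks above: alongside rewrapping, Black normalizes string literals to double quotes wherever that does not change meaning, which is why r'(?<!\\)\.' becomes r"(?<!\\)\.". A small sketch of the rule, illustrative only:

    # before: single-quoted raw string
    SPLIT_FIELD_REGEX = re.compile(r'(?<!\\)\.')
    # after Black: double quotes preferred, pattern unchanged
    SPLIT_FIELD_REGEX = re.compile(r"(?<!\\)\.")

diff --git a/synapse/events/validator.py b/synapse/events/validator.py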
@@ -48,9 +48,7 @@ class EventValidator(object):
                 raise SynapseError(400, "Event does not have key %s" % (k,))

         # Check that the following keys have string values
-        event_strings = [
-            "origin",
-        ]
+        event_strings = ["origin"]

         for s in event_strings:
             if not isinstance(getattr(event, s), string_types):

@@ -62,8 +60,10 @@ class EventValidator(object):
                 if len(alias) > MAX_ALIAS_LENGTH:
                     raise SynapseError(
                         400,
-                        ("Can't create aliases longer than"
-                         " %d characters" % (MAX_ALIAS_LENGTH,)),
+                        (
+                            "Can't create aliases longer than"
+                            " %d characters" % (MAX_ALIAS_LENGTH,)
+                        ),
                         Codes.INVALID_PARAM,
                     )


@@ -76,11 +76,7 @@ class EventValidator(object):
             event (EventBuilder|FrozenEvent)
         """

-        strings = [
-            "room_id",
-            "sender",
-            "type",
-        ]
+        strings = ["room_id", "sender", "type"]

         if hasattr(event, "state_key"):
             strings.append("state_key")


@@ -93,10 +89,7 @@ class EventValidator(object):
         UserID.from_string(event.sender)

         if event.type == EventTypes.Message:
-            strings = [
-                "body",
-                "msgtype",
-            ]
+            strings = ["body", "msgtype"]

             self._ensure_strings(event.content, strings)

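diff --git a/synapse/federation/federation_base.py b/synapse/federation/federation_base.py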
@@ -44,8 +44,9 @@ class FederationBase(object):
         self._clock = hs.get_clock()

     @defer.inlineCallbacks
-    def _check_sigs_and_hash_and_fetch(self, origin, pdus, room_version,
-                                       outlier=False, include_none=False):
+    def _check_sigs_and_hash_and_fetch(
+        self, origin, pdus, room_version, outlier=False, include_none=False
+    ):
         """Takes a list of PDUs and checks the signatures and hashs of each
         one. If a PDU fails its signature check then we check if we have it in
         the database and if not then request if from the originating server of

@@ -79,9 +80,7 @@ class FederationBase(object):
             if not res:
                 # Check local db.
                 res = yield self.store.get_event(
-                    pdu.event_id,
-                    allow_rejected=True,
-                    allow_none=True,
+                    pdu.event_id, allow_rejected=True, allow_none=True
                 )

             if not res and pdu.origin != origin:

@@ -98,23 +97,16 @@ class FederationBase(object):

             if not res:
                 logger.warn(
-                    "Failed to find copy of %s with valid signature",
-                    pdu.event_id,
+                    "Failed to find copy of %s with valid signature", pdu.event_id
                 )

             defer.returnValue(res)

         handle = logcontext.preserve_fn(handle_check_result)
-        deferreds2 = [
-            handle(pdu, deferred)
-            for pdu, deferred in zip(pdus, deferreds)
-        ]
+        deferreds2 = [handle(pdu, deferred) for pdu, deferred in zip(pdus, deferreds)]

         valid_pdus = yield logcontext.make_deferred_yieldable(
-            defer.gatherResults(
-                deferreds2,
-                consumeErrors=True,
-            )
+            defer.gatherResults(deferreds2, consumeErrors=True)
         ).addErrback(unwrapFirstError)

         if include_none:

@@ -124,7 +116,7 @@ class FederationBase(object):

     def _check_sigs_and_hash(self, room_version, pdu):
         return logcontext.make_deferred_yieldable(
-            self._check_sigs_and_hashes(room_version, [pdu])[0],
+            self._check_sigs_and_hashes(room_version, [pdu])[0]
         )

     def _check_sigs_and_hashes(self, room_version, pdus):

@@ -159,11 +151,9 @@ class FederationBase(object):
            # received event was probably a redacted copy (but we then use our
            # *actual* redacted copy to be on the safe side.)
            redacted_event = prune_event(pdu)
-           if (
-               set(redacted_event.keys()) == set(pdu.keys()) and
-               set(six.iterkeys(redacted_event.content))
-               == set(six.iterkeys(pdu.content))
-           ):
+           if set(redacted_event.keys()) == set(pdu.keys()) and set(
+               six.iterkeys(redacted_event.content)
+           ) == set(six.iterkeys(pdu.content)):
                logger.info(
                    "Event %s seems to have been redacted; using our redacted "
                    "copy",

@@ -172,14 +162,16 @@ class FederationBase(object):
            else:
                logger.warning(
                    "Event %s content has been tampered, redacting",
-                   pdu.event_id, pdu.get_pdu_json(),
+                   pdu.event_id,
+                   pdu.get_pdu_json(),
                )
                return redacted_event

            if self.spam_checker.check_event_for_spam(pdu):
                logger.warn(
                    "Event contains spam, redacting %s: %s",
-                   pdu.event_id, pdu.get_pdu_json()
+                   pdu.event_id,
+                   pdu.get_pdu_json(),
                )
                return prune_event(pdu)


@@ -190,23 +182,24 @@ class FederationBase(object):
             with logcontext.PreserveLoggingContext(ctx):
                 logger.warn(
                     "Signature check failed for %s: %s",
-                    pdu.event_id, failure.getErrorMessage(),
+                    pdu.event_id,
+                    failure.getErrorMessage(),
                 )
             return failure

         for deferred, pdu in zip(deferreds, pdus):
             deferred.addCallbacks(
-                callback, errback,
-                callbackArgs=[pdu],
-                errbackArgs=[pdu],
+                callback, errback, callbackArgs=[pdu], errbackArgs=[pdu]
             )

         return deferreds


-class PduToCheckSig(namedtuple("PduToCheckSig", [
-    "pdu", "redacted_pdu_json", "sender_domain", "deferreds",
-])):
+class PduToCheckSig(
+    namedtuple(
+        "PduToCheckSig", ["pdu", "redacted_pdu_json", "sender_domain", "deferreds"]
+    )
+):
     pass


@@ -260,10 +253,7 @@ def _check_sigs_on_pdus(keyring, room_version, pdus):

     # First we check that the sender event is signed by the sender's domain
     # (except if its a 3pid invite, in which case it may be sent by any server)
-    pdus_to_check_sender = [
-        p for p in pdus_to_check
-        if not _is_invite_via_3pid(p.pdu)
-    ]
+    pdus_to_check_sender = [p for p in pdus_to_check if not _is_invite_via_3pid(p.pdu)]

     more_deferreds = keyring.verify_json_objects_for_server(
         [

@@ -297,7 +287,8 @@ def _check_sigs_on_pdus(keyring, room_version, pdus):
     # (ie, the room version uses old-style non-hash event IDs).
     if v.event_format == EventFormatVersions.V1:
         pdus_to_check_event_id = [
-            p for p in pdus_to_check
+            p
+            for p in pdus_to_check
             if p.sender_domain != get_domain_from_id(p.pdu.event_id)
         ]


@@ -315,10 +306,8 @@ def _check_sigs_on_pdus(keyring, room_version, pdus):

     def event_err(e, pdu_to_check):
         errmsg = (
-            "event id %s: unable to verify signature for event id domain: %s" % (
-                pdu_to_check.pdu.event_id,
-                e.getErrorMessage(),
-            )
+            "event id %s: unable to verify signature for event id domain: %s"
+            % (pdu_to_check.pdu.event_id, e.getErrorMessage())
         )
         # XX as above: not really sure if these are the right codes
         raise SynapseError(400, errmsg, Codes.UNAUTHORIZED)

@@ -368,21 +357,18 @@ def event_from_pdu_json(pdu_json, event_format_version, outlier=False):
     """
     # we could probably enforce a bunch of other fields here (room_id, sender,
     # origin, etc etc)
-    assert_params_in_dict(pdu_json, ('type', 'depth'))
+    assert_params_in_dict(pdu_json, ("type", "depth"))

-    depth = pdu_json['depth']
+    depth = pdu_json["depth"]
     if not isinstance(depth, six.integer_types):
-        raise SynapseError(400, "Depth %r not an intger" % (depth, ),
-                           Codes.BAD_JSON)
+        raise SynapseError(400, "Depth %r not an intger" % (depth,), Codes.BAD_JSON)

     if depth < 0:
         raise SynapseError(400, "Depth too small", Codes.BAD_JSON)
     elif depth > MAX_DEPTH:
         raise SynapseError(400, "Depth too large", Codes.BAD_JSON)

-    event = event_type_from_format_version(event_format_version)(
-        pdu_json,
-    )
+    event = event_type_from_format_version(event_format_version)(pdu_json)

     event.internal_metadata.outlier = outlier

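federation_base.py above also demonstrates Black's operator placement: when a long expression has to wrap, binary operators such as + and % lead the continuation line rather than trailing the previous one. An illustrative sketch of the rule, not taken from the commit:

    # before: operator at end of line
    errmsg = (
        "event id %s: unable to verify signature for event id domain: %s" % (
            pdu_to_check.pdu.event_id,
            e.getErrorMessage(),
        )
    )
    # after Black: % starts the continuation line
    errmsg = (
        "event id %s: unable to verify signature for event id domain: %s"
        % (pdu_to_check.pdu.event_id, e.getErrorMessage())
    )

diff --git a/synapse/federation/federation_client.py b/synapse/federation/federation_client.py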
@@ -57,6 +57,7 @@ class InvalidResponseError(RuntimeError):
     """Helper for _try_destination_list: indicates that the server returned a response
     we couldn't parse
     """
+
     pass


@@ -65,9 +66,7 @@ class FederationClient(FederationBase):
         super(FederationClient, self).__init__(hs)

         self.pdu_destination_tried = {}
-        self._clock.looping_call(
-            self._clear_tried_cache, 60 * 1000,
-        )
+        self._clock.looping_call(self._clear_tried_cache, 60 * 1000)
         self.state = hs.get_state_handler()
         self.transport_layer = hs.get_federation_transport_client()


@@ -99,8 +98,14 @@ class FederationClient(FederationBase):
         self.pdu_destination_tried[event_id] = destination_dict

     @log_function
-    def make_query(self, destination, query_type, args,
-                   retry_on_dns_fail=False, ignore_backoff=False):
+    def make_query(
+        self,
+        destination,
+        query_type,
+        args,
+        retry_on_dns_fail=False,
+        ignore_backoff=False,
+    ):
         """Sends a federation Query to a remote homeserver of the given type
         and arguments.


@@ -120,7 +125,10 @@ class FederationClient(FederationBase):
         sent_queries_counter.labels(query_type).inc()

         return self.transport_layer.make_query(
-            destination, query_type, args, retry_on_dns_fail=retry_on_dns_fail,
+            destination,
+            query_type,
+            args,
+            retry_on_dns_fail=retry_on_dns_fail,
             ignore_backoff=ignore_backoff,
         )


@@ -137,9 +145,7 @@ class FederationClient(FederationBase):
             response
         """
         sent_queries_counter.labels("client_device_keys").inc()
-        return self.transport_layer.query_client_keys(
-            destination, content, timeout
-        )
+        return self.transport_layer.query_client_keys(destination, content, timeout)

     @log_function
     def query_user_devices(self, destination, user_id, timeout=30000):

@@ -147,9 +153,7 @@ class FederationClient(FederationBase):
         server.
         """
         sent_queries_counter.labels("user_devices").inc()
-        return self.transport_layer.query_user_devices(
-            destination, user_id, timeout
-        )
+        return self.transport_layer.query_user_devices(destination, user_id, timeout)

     @log_function
     def claim_client_keys(self, destination, content, timeout):

@@ -164,9 +168,7 @@ class FederationClient(FederationBase):
             response
         """
         sent_queries_counter.labels("client_one_time_keys").inc()
-        return self.transport_layer.claim_client_keys(
-            destination, content, timeout
-        )
+        return self.transport_layer.claim_client_keys(destination, content, timeout)

     @defer.inlineCallbacks
     @log_function

@@ -191,7 +193,8 @@ class FederationClient(FederationBase):
             return

         transaction_data = yield self.transport_layer.backfill(
-            dest, room_id, extremities, limit)
+            dest, room_id, extremities, limit
+        )

         logger.debug("backfill transaction_data=%s", repr(transaction_data))


@@ -204,17 +207,19 @@ class FederationClient(FederationBase):
         ]

         # FIXME: We should handle signature failures more gracefully.
-        pdus[:] = yield logcontext.make_deferred_yieldable(defer.gatherResults(
-            self._check_sigs_and_hashes(room_version, pdus),
-            consumeErrors=True,
-        ).addErrback(unwrapFirstError))
+        pdus[:] = yield logcontext.make_deferred_yieldable(
+            defer.gatherResults(
+                self._check_sigs_and_hashes(room_version, pdus), consumeErrors=True
+            ).addErrback(unwrapFirstError)
+        )

         defer.returnValue(pdus)

     @defer.inlineCallbacks
     @log_function
-    def get_pdu(self, destinations, event_id, room_version, outlier=False,
-                timeout=None):
+    def get_pdu(
+        self, destinations, event_id, room_version, outlier=False, timeout=None
+    ):
         """Requests the PDU with given origin and ID from the remote home
         servers.


@@ -255,7 +260,7 @@ class FederationClient(FederationBase):

             try:
                 transaction_data = yield self.transport_layer.get_event(
-                    destination, event_id, timeout=timeout,
+                    destination, event_id, timeout=timeout
                 )

                 logger.debug(

@@ -282,8 +287,7 @@ class FederationClient(FederationBase):

             except SynapseError as e:
                 logger.info(
-                    "Failed to get PDU %s from %s because %s",
-                    event_id, destination, e,
+                    "Failed to get PDU %s from %s because %s", event_id, destination, e
                 )
                 continue
             except NotRetryingDestination as e:

@@ -296,8 +300,7 @@ class FederationClient(FederationBase):
                 pdu_attempts[destination] = now

                 logger.info(
-                    "Failed to get PDU %s from %s because %s",
-                    event_id, destination, e,
+                    "Failed to get PDU %s from %s because %s", event_id, destination, e
                 )
                 continue


@@ -326,7 +329,7 @@ class FederationClient(FederationBase):
             # we have most of the state and auth_chain already.
             # However, this may 404 if the other side has an old synapse.
             result = yield self.transport_layer.get_room_state_ids(
-                destination, room_id, event_id=event_id,
+                destination, room_id, event_id=event_id
             )

             state_event_ids = result["pdu_ids"]

@@ -340,12 +343,10 @@ class FederationClient(FederationBase):
                 logger.warning(
                     "Failed to fetch missing state/auth events for %s: %s",
                     room_id,
-                    failed_to_fetch
+                    failed_to_fetch,
                 )

-            event_map = {
-                ev.event_id: ev for ev in fetched_events
-            }
+            event_map = {ev.event_id: ev for ev in fetched_events}

             pdus = [event_map[e_id] for e_id in state_event_ids if e_id in event_map]
             auth_chain = [

@@ -362,15 +363,14 @@ class FederationClient(FederationBase):
                 raise e

         result = yield self.transport_layer.get_room_state(
-            destination, room_id, event_id=event_id,
+            destination, room_id, event_id=event_id
         )

         room_version = yield self.store.get_room_version(room_id)
         format_ver = room_version_to_event_format(room_version)

         pdus = [
-            event_from_pdu_json(p, format_ver, outlier=True)
-            for p in result["pdus"]
+            event_from_pdu_json(p, format_ver, outlier=True) for p in result["pdus"]
         ]

         auth_chain = [

@@ -378,9 +378,9 @@ class FederationClient(FederationBase):
             for p in result.get("auth_chain", [])
         ]

-        seen_events = yield self.store.get_events([
-            ev.event_id for ev in itertools.chain(pdus, auth_chain)
-        ])
+        seen_events = yield self.store.get_events(
+            [ev.event_id for ev in itertools.chain(pdus, auth_chain)]
+        )

         signed_pdus = yield self._check_sigs_and_hash_and_fetch(
             destination,

@@ -442,7 +442,7 @@ class FederationClient(FederationBase):
         batch_size = 20
         missing_events = list(missing_events)
         for i in range(0, len(missing_events), batch_size):
-            batch = set(missing_events[i:i + batch_size])
+            batch = set(missing_events[i : i + batch_size])

             deferreds = [
                 run_in_background(

@@ -470,21 +470,17 @@ class FederationClient(FederationBase):
     @defer.inlineCallbacks
     @log_function
     def get_event_auth(self, destination, room_id, event_id):
-        res = yield self.transport_layer.get_event_auth(
-            destination, room_id, event_id,
-        )
+        res = yield self.transport_layer.get_event_auth(destination, room_id, event_id)

         room_version = yield self.store.get_room_version(room_id)
         format_ver = room_version_to_event_format(room_version)

         auth_chain = [
-            event_from_pdu_json(p, format_ver, outlier=True)
-            for p in res["auth_chain"]
+            event_from_pdu_json(p, format_ver, outlier=True) for p in res["auth_chain"]
         ]

         signed_auth = yield self._check_sigs_and_hash_and_fetch(
-            destination, auth_chain,
-            outlier=True, room_version=room_version,
+            destination, auth_chain, outlier=True, room_version=room_version
         )

         signed_auth.sort(key=lambda e: e.depth)

@@ -527,28 +523,26 @@ class FederationClient(FederationBase):
                 res = yield callback(destination)
                 defer.returnValue(res)
             except InvalidResponseError as e:
-                logger.warn(
-                    "Failed to %s via %s: %s",
-                    description, destination, e,
-                )
+                logger.warn("Failed to %s via %s: %s", description, destination, e)
             except HttpResponseException as e:
                 if not 500 <= e.code < 600:
                     raise e.to_synapse_error()
                 else:
                     logger.warn(
                         "Failed to %s via %s: %i %s",
-                        description, destination, e.code, e.args[0],
+                        description,
+                        destination,
+                        e.code,
+                        e.args[0],
                     )
             except Exception:
-                logger.warn(
-                    "Failed to %s via %s",
-                    description, destination, exc_info=1,
-                )
+                logger.warn("Failed to %s via %s", description, destination, exc_info=1)

-        raise RuntimeError("Failed to %s via any server" % (description, ))
+        raise RuntimeError("Failed to %s via any server" % (description,))

-    def make_membership_event(self, destinations, room_id, user_id, membership,
-                              content, params):
+    def make_membership_event(
+        self, destinations, room_id, user_id, membership, content, params
+    ):
         """
         Creates an m.room.member event, with context, without participating in the room.


@@ -584,14 +578,14 @@ class FederationClient(FederationBase):
         valid_memberships = {Membership.JOIN, Membership.LEAVE}
         if membership not in valid_memberships:
             raise RuntimeError(
-                "make_membership_event called with membership='%s', must be one of %s" %
-                (membership, ",".join(valid_memberships))
+                "make_membership_event called with membership='%s', must be one of %s"
+                % (membership, ",".join(valid_memberships))
             )

         @defer.inlineCallbacks
         def send_request(destination):
             ret = yield self.transport_layer.make_membership_event(
-                destination, room_id, user_id, membership, params,
+                destination, room_id, user_id, membership, params
             )

             # Note: If not supplied, the room version may be either v1 or v2,

@@ -614,16 +608,17 @@ class FederationClient(FederationBase):
                 pdu_dict["prev_state"] = []

             ev = builder.create_local_event_from_event_dict(
-                self._clock, self.hostname, self.signing_key,
-                format_version=event_format, event_dict=pdu_dict,
+                self._clock,
+                self.hostname,
+                self.signing_key,
+                format_version=event_format,
+                event_dict=pdu_dict,
             )

-            defer.returnValue(
-                (destination, ev, event_format)
-            )
+            defer.returnValue((destination, ev, event_format))

         return self._try_destination_list(
-            "make_" + membership, destinations, send_request,
+            "make_" + membership, destinations, send_request
         )

     def send_join(self, destinations, pdu, event_format_version):

@@ -655,9 +650,7 @@ class FederationClient(FederationBase):
                     create_event = e
                     break
             else:
-                raise InvalidResponseError(
-                    "no %s in auth chain" % (EventTypes.Create,),
-                )
+                raise InvalidResponseError("no %s in auth chain" % (EventTypes.Create,))

             # the room version should be sane.
             room_version = create_event.content.get("room_version", "1")

@@ -665,9 +658,8 @@ class FederationClient(FederationBase):
                 # This shouldn't be possible, because the remote server should have
                 # rejected the join attempt during make_join.
                 raise InvalidResponseError(
-                    "room appears to have unsupported version %s" % (
-                        room_version,
-                    ))
+                    "room appears to have unsupported version %s" % (room_version,)
+                )

         @defer.inlineCallbacks
         def send_request(destination):

@@ -691,10 +683,7 @@ class FederationClient(FederationBase):
                 for p in content.get("auth_chain", [])
             ]

-            pdus = {
-                p.event_id: p
-                for p in itertools.chain(state, auth_chain)
-            }
+            pdus = {p.event_id: p for p in itertools.chain(state, auth_chain)}

             room_version = None
             for e in state:

@@ -710,15 +699,13 @@ class FederationClient(FederationBase):
                 raise SynapseError(400, "No create event in state")

             valid_pdus = yield self._check_sigs_and_hash_and_fetch(
-                destination, list(pdus.values()),
+                destination,
+                list(pdus.values()),
                 outlier=True,
                 room_version=room_version,
             )

-            valid_pdus_map = {
-                p.event_id: p
-                for p in valid_pdus
-            }
+            valid_pdus_map = {p.event_id: p for p in valid_pdus}

             # NB: We *need* to copy to ensure that we don't have multiple
             # references being passed on, as that causes... issues.

@@ -741,11 +728,14 @@ class FederationClient(FederationBase):

             check_authchain_validity(signed_auth)

-            defer.returnValue({
-                "state": signed_state,
-                "auth_chain": signed_auth,
-                "origin": destination,
-            })
+            defer.returnValue(
+                {
+                    "state": signed_state,
+                    "auth_chain": signed_auth,
+                    "origin": destination,
+                }
+            )

         return self._try_destination_list("send_join", destinations, send_request)

     @defer.inlineCallbacks

@@ -854,6 +844,7 @@ class FederationClient(FederationBase):

         Fails with a ``RuntimeError`` if no servers were reachable.
         """
+
         @defer.inlineCallbacks
         def send_request(destination):
             time_now = self._clock.time_msec()

@@ -869,14 +860,23 @@ class FederationClient(FederationBase):

         return self._try_destination_list("send_leave", destinations, send_request)

-    def get_public_rooms(self, destination, limit=None, since_token=None,
-                         search_filter=None, include_all_networks=False,
-                         third_party_instance_id=None):
+    def get_public_rooms(
+        self,
+        destination,
+        limit=None,
+        since_token=None,
+        search_filter=None,
+        include_all_networks=False,
+        third_party_instance_id=None,
+    ):
         if destination == self.server_name:
             return

         return self.transport_layer.get_public_rooms(
-            destination, limit, since_token, search_filter,
+            destination,
+            limit,
+            since_token,
+            search_filter,
             include_all_networks=include_all_networks,
             third_party_instance_id=third_party_instance_id,
         )

@@ -891,9 +891,7 @@ class FederationClient(FederationBase):
         """
         time_now = self._clock.time_msec()

-        send_content = {
-            "auth_chain": [e.get_pdu_json(time_now) for e in local_auth],
-        }
+        send_content = {"auth_chain": [e.get_pdu_json(time_now) for e in local_auth]}

         code, content = yield self.transport_layer.send_query_auth(
             destination=destination,

@@ -905,13 +903,10 @@ class FederationClient(FederationBase):
         room_version = yield self.store.get_room_version(room_id)
         format_ver = room_version_to_event_format(room_version)

-        auth_chain = [
-            event_from_pdu_json(e, format_ver)
-            for e in content["auth_chain"]
-        ]
+        auth_chain = [event_from_pdu_json(e, format_ver) for e in content["auth_chain"]]

         signed_auth = yield self._check_sigs_and_hash_and_fetch(
-            destination, auth_chain, outlier=True, room_version=room_version,
+            destination, auth_chain, outlier=True, room_version=room_version
         )

         signed_auth.sort(key=lambda e: e.depth)

@@ -925,8 +920,16 @@ class FederationClient(FederationBase):
         defer.returnValue(ret)

     @defer.inlineCallbacks
-    def get_missing_events(self, destination, room_id, earliest_events_ids,
-                           latest_events, limit, min_depth, timeout):
+    def get_missing_events(
+        self,
+        destination,
+        room_id,
+        earliest_events_ids,
+        latest_events,
+        limit,
+        min_depth,
+        timeout,
+    ):
         """Tries to fetch events we are missing. This is called when we receive
         an event without having received all of its ancestors.


@@ -957,12 +960,11 @@ class FederationClient(FederationBase):
             format_ver = room_version_to_event_format(room_version)

             events = [
-                event_from_pdu_json(e, format_ver)
-                for e in content.get("events", [])
+                event_from_pdu_json(e, format_ver) for e in content.get("events", [])
             ]

             signed_events = yield self._check_sigs_and_hash_and_fetch(
-                destination, events, outlier=False, room_version=room_version,
+                destination, events, outlier=False, room_version=room_version
             )
         except HttpResponseException as e:
             if not e.code == 400:

@@ -982,17 +984,14 @@ class FederationClient(FederationBase):

             try:
                 yield self.transport_layer.exchange_third_party_invite(
-                    destination=destination,
-                    room_id=room_id,
-                    event_dict=event_dict,
+                    destination=destination, room_id=room_id, event_dict=event_dict
                 )
                 defer.returnValue(None)
             except CodeMessageException:
                 raise
             except Exception as e:
                 logger.exception(
-                    "Failed to send_third_party_invite via %s: %s",
-                    destination, str(e)
+                    "Failed to send_third_party_invite via %s: %s", destination, str(e)
                 )

         raise RuntimeError("Failed to send to any server.")
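One more pattern worth calling out from federation_client.py: slice bounds that are expressions gain spaces around the colon, per PEP 8's "complex expression" rule. A one-line sketch, illustrative only:

    batch = set(missing_events[i : i + batch_size])  # was missing_events[i:i + batch_size]

diff --git a/synapse/federation/federation_server.py b/synapse/federation/federation_server.py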
@ -69,7 +69,6 @@ received_queries_counter = Counter(
|
|||
|
||||
|
||||
class FederationServer(FederationBase):
|
||||
|
||||
def __init__(self, hs):
|
||||
super(FederationServer, self).__init__(hs)
|
||||
|
||||
|
@ -118,11 +117,13 @@ class FederationServer(FederationBase):
|
|||
|
||||
# use a linearizer to ensure that we don't process the same transaction
|
||||
# multiple times in parallel.
|
||||
with (yield self._transaction_linearizer.queue(
|
||||
(origin, transaction.transaction_id),
|
||||
)):
|
||||
with (
|
||||
yield self._transaction_linearizer.queue(
|
||||
(origin, transaction.transaction_id)
|
||||
)
|
||||
):
|
||||
result = yield self._handle_incoming_transaction(
|
||||
origin, transaction, request_time,
|
||||
origin, transaction, request_time
|
||||
)
|
||||
|
||||
defer.returnValue(result)
|
||||
|
@ -144,7 +145,7 @@ class FederationServer(FederationBase):
|
|||
if response:
|
||||
logger.debug(
|
||||
"[%s] We've already responded to this request",
|
||||
transaction.transaction_id
|
||||
transaction.transaction_id,
|
||||
)
|
||||
defer.returnValue(response)
|
||||
return
|
||||
|
@ -152,18 +153,15 @@ class FederationServer(FederationBase):
|
|||
logger.debug("[%s] Transaction is new", transaction.transaction_id)
|
||||
|
||||
# Reject if PDU count > 50 and EDU count > 100
|
||||
if (len(transaction.pdus) > 50
|
||||
or (hasattr(transaction, "edus") and len(transaction.edus) > 100)):
|
||||
if len(transaction.pdus) > 50 or (
|
||||
hasattr(transaction, "edus") and len(transaction.edus) > 100
|
||||
):
|
||||
|
||||
logger.info(
|
||||
"Transaction PDU or EDU count too large. Returning 400",
|
||||
)
|
||||
logger.info("Transaction PDU or EDU count too large. Returning 400")
|
||||
|
||||
response = {}
|
||||
yield self.transaction_actions.set_response(
|
||||
origin,
|
||||
transaction,
|
||||
400, response
|
||||
origin, transaction, 400, response
|
||||
)
|
||||
defer.returnValue((400, response))
|
||||
|
||||
|
@ -230,9 +228,7 @@ class FederationServer(FederationBase):
|
|||
try:
|
||||
yield self.check_server_matches_acl(origin_host, room_id)
|
||||
except AuthError as e:
|
||||
logger.warn(
|
||||
"Ignoring PDUs for room %s from banned server", room_id,
|
||||
)
|
||||
logger.warn("Ignoring PDUs for room %s from banned server", room_id)
|
||||
for pdu in pdus_by_room[room_id]:
|
||||
event_id = pdu.event_id
|
||||
pdu_results[event_id] = e.error_dict()
|
||||
|
@ -242,9 +238,7 @@ class FederationServer(FederationBase):
|
|||
event_id = pdu.event_id
|
||||
with nested_logging_context(event_id):
|
||||
try:
|
||||
yield self._handle_received_pdu(
|
||||
origin, pdu
|
||||
)
|
||||
yield self._handle_received_pdu(origin, pdu)
|
||||
pdu_results[event_id] = {}
|
||||
except FederationError as e:
|
||||
logger.warn("Error handling PDU %s: %s", event_id, e)
|
||||
|
@ -259,29 +253,18 @@ class FederationServer(FederationBase):
|
|||
)
|
||||
|
||||
yield concurrently_execute(
|
||||
process_pdus_for_room, pdus_by_room.keys(),
|
||||
TRANSACTION_CONCURRENCY_LIMIT,
|
||||
process_pdus_for_room, pdus_by_room.keys(), TRANSACTION_CONCURRENCY_LIMIT
|
||||
)
|
||||
|
||||
if hasattr(transaction, "edus"):
|
||||
for edu in (Edu(**x) for x in transaction.edus):
|
||||
yield self.received_edu(
|
||||
origin,
|
||||
edu.edu_type,
|
||||
edu.content
|
||||
)
|
||||
yield self.received_edu(origin, edu.edu_type, edu.content)
|
||||
|
||||
response = {
|
||||
"pdus": pdu_results,
|
||||
}
|
||||
response = {"pdus": pdu_results}
|
||||
|
||||
logger.debug("Returning: %s", str(response))
|
||||
|
||||
yield self.transaction_actions.set_response(
|
||||
origin,
|
||||
transaction,
|
||||
200, response
|
||||
)
|
||||
yield self.transaction_actions.set_response(origin, transaction, 200, response)
|
||||
defer.returnValue((200, response))
|
||||
|
||||
@defer.inlineCallbacks
|
||||
|
@ -311,7 +294,8 @@ class FederationServer(FederationBase):
|
|||
resp = yield self._state_resp_cache.wrap(
|
||||
(room_id, event_id),
|
||||
self._on_context_state_request_compute,
|
||||
room_id, event_id,
|
||||
room_id,
|
||||
event_id,
|
||||
)
|
||||
|
||||
defer.returnValue((200, resp))
|
||||
|
@ -328,24 +312,17 @@ class FederationServer(FederationBase):
|
|||
if not in_room:
|
||||
raise AuthError(403, "Host not in room.")
|
||||
|
||||
state_ids = yield self.handler.get_state_ids_for_pdu(
|
||||
room_id, event_id,
|
||||
)
|
||||
state_ids = yield self.handler.get_state_ids_for_pdu(room_id, event_id)
|
||||
auth_chain_ids = yield self.store.get_auth_chain_ids(state_ids)
|
||||
|
||||
defer.returnValue((200, {
|
||||
"pdu_ids": state_ids,
|
||||
"auth_chain_ids": auth_chain_ids,
|
||||
}))
|
||||
defer.returnValue(
|
||||
(200, {"pdu_ids": state_ids, "auth_chain_ids": auth_chain_ids})
|
||||
)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def _on_context_state_request_compute(self, room_id, event_id):
|
||||
pdus = yield self.handler.get_state_for_pdu(
|
||||
room_id, event_id,
|
||||
)
|
||||
auth_chain = yield self.store.get_auth_chain(
|
||||
[pdu.event_id for pdu in pdus]
|
||||
)
|
||||
pdus = yield self.handler.get_state_for_pdu(room_id, event_id)
|
||||
auth_chain = yield self.store.get_auth_chain([pdu.event_id for pdu in pdus])
|
||||
|
||||
for event in auth_chain:
|
||||
# We sign these again because there was a bug where we
|
||||
|
@ -355,14 +332,16 @@ class FederationServer(FederationBase):
|
|||
compute_event_signature(
|
||||
event.get_pdu_json(),
|
||||
self.hs.hostname,
|
||||
self.hs.config.signing_key[0]
|
||||
self.hs.config.signing_key[0],
|
||||
)
|
||||
)
|
||||
|
||||
defer.returnValue({
|
||||
"pdus": [pdu.get_pdu_json() for pdu in pdus],
|
||||
"auth_chain": [pdu.get_pdu_json() for pdu in auth_chain],
|
||||
})
|
||||
defer.returnValue(
|
||||
{
|
||||
"pdus": [pdu.get_pdu_json() for pdu in pdus],
|
||||
"auth_chain": [pdu.get_pdu_json() for pdu in auth_chain],
|
||||
}
|
||||
)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
@log_function
|
||||
|
@ -370,9 +349,7 @@ class FederationServer(FederationBase):
|
|||
pdu = yield self.handler.get_persisted_pdu(origin, event_id)
|
||||
|
||||
if pdu:
|
||||
defer.returnValue(
|
||||
(200, self._transaction_from_pdus([pdu]).get_dict())
|
||||
)
|
||||
defer.returnValue((200, self._transaction_from_pdus([pdu]).get_dict()))
|
||||
else:
|
||||
defer.returnValue((404, ""))
|
||||
|
||||
|
@ -394,10 +371,9 @@ class FederationServer(FederationBase):
|
|||
|
||||
pdu = yield self.handler.on_make_join_request(room_id, user_id)
|
||||
time_now = self._clock.time_msec()
|
||||
defer.returnValue({
|
||||
"event": pdu.get_pdu_json(time_now),
|
||||
"room_version": room_version,
|
||||
})
|
||||
defer.returnValue(
|
||||
{"event": pdu.get_pdu_json(time_now), "room_version": room_version}
|
||||
)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def on_invite_request(self, origin, content, room_version):
|
||||
|
@@ -431,12 +407,17 @@ class FederationServer(FederationBase):
logger.debug("on_send_join_request: pdu sigs: %s", pdu.signatures)
res_pdus = yield self.handler.on_send_join_request(origin, pdu)
time_now = self._clock.time_msec()
defer.returnValue((200, {
"state": [p.get_pdu_json(time_now) for p in res_pdus["state"]],
"auth_chain": [
p.get_pdu_json(time_now) for p in res_pdus["auth_chain"]
],
}))
defer.returnValue(
(
200,
{
"state": [p.get_pdu_json(time_now) for p in res_pdus["state"]],
"auth_chain": [
p.get_pdu_json(time_now) for p in res_pdus["auth_chain"]
],
},
)
)

@defer.inlineCallbacks
def on_make_leave_request(self, origin, room_id, user_id):

@@ -447,10 +428,9 @@ class FederationServer(FederationBase):
room_version = yield self.store.get_room_version(room_id)

time_now = self._clock.time_msec()
defer.returnValue({
"event": pdu.get_pdu_json(time_now),
"room_version": room_version,
})
defer.returnValue(
{"event": pdu.get_pdu_json(time_now), "room_version": room_version}
)

@defer.inlineCallbacks
def on_send_leave_request(self, origin, content, room_id):

@@ -475,9 +455,7 @@ class FederationServer(FederationBase):

time_now = self._clock.time_msec()
auth_pdus = yield self.handler.on_event_auth(event_id)
res = {
"auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus],
}
res = {"auth_chain": [a.get_pdu_json(time_now) for a in auth_pdus]}
defer.returnValue((200, res))

@defer.inlineCallbacks

@@ -508,12 +486,11 @@ class FederationServer(FederationBase):
format_ver = room_version_to_event_format(room_version)

auth_chain = [
event_from_pdu_json(e, format_ver)
for e in content["auth_chain"]
event_from_pdu_json(e, format_ver) for e in content["auth_chain"]
]

signed_auth = yield self._check_sigs_and_hash_and_fetch(
origin, auth_chain, outlier=True, room_version=room_version,
origin, auth_chain, outlier=True, room_version=room_version
)

ret = yield self.handler.on_query_auth(

@@ -527,17 +504,12 @@ class FederationServer(FederationBase):

time_now = self._clock.time_msec()
send_content = {
"auth_chain": [
e.get_pdu_json(time_now)
for e in ret["auth_chain"]
],
"auth_chain": [e.get_pdu_json(time_now) for e in ret["auth_chain"]],
"rejects": ret.get("rejects", []),
"missing": ret.get("missing", []),
}

defer.returnValue(
(200, send_content)
)
defer.returnValue((200, send_content))

@log_function
def on_query_client_keys(self, origin, content):

@@ -566,20 +538,23 @@ class FederationServer(FederationBase):

logger.info(
"Claimed one-time-keys: %s",
",".join((
"%s for %s:%s" % (key_id, user_id, device_id)
for user_id, user_keys in iteritems(json_result)
for device_id, device_keys in iteritems(user_keys)
for key_id, _ in iteritems(device_keys)
)),
",".join(
(
"%s for %s:%s" % (key_id, user_id, device_id)
for user_id, user_keys in iteritems(json_result)
for device_id, device_keys in iteritems(user_keys)
for key_id, _ in iteritems(device_keys)
)
),
)

defer.returnValue({"one_time_keys": json_result})

@defer.inlineCallbacks
@log_function
def on_get_missing_events(self, origin, room_id, earliest_events,
latest_events, limit):
def on_get_missing_events(
self, origin, room_id, earliest_events, latest_events, limit
):
with (yield self._server_linearizer.queue((origin, room_id))):
origin_host, _ = parse_server_name(origin)
yield self.check_server_matches_acl(origin_host, room_id)

@@ -587,11 +562,13 @@ class FederationServer(FederationBase):
logger.info(
"on_get_missing_events: earliest_events: %r, latest_events: %r,"
" limit: %d",
earliest_events, latest_events, limit,
earliest_events,
latest_events,
limit,
)

missing_events = yield self.handler.on_get_missing_events(
origin, room_id, earliest_events, latest_events, limit,
origin, room_id, earliest_events, latest_events, limit
)

if len(missing_events) < 5:

@@ -603,9 +580,9 @@ class FederationServer(FederationBase):

time_now = self._clock.time_msec()

defer.returnValue({
"events": [ev.get_pdu_json(time_now) for ev in missing_events],
})
defer.returnValue(
{"events": [ev.get_pdu_json(time_now) for ev in missing_events]}
)

@log_function
def on_openid_userinfo(self, token):
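
The same rule applies to def signatures, and both of Black's wrapping shapes appear in this commit: the whole parameter list is first tried on a single indented continuation line, and only if that still overflows does each parameter get its own line. Both signatures below are taken verbatim from hunks in this diff; the `...` bodies are placeholders:

# Parameter list fits on one continuation line:
def on_get_missing_events(
    self, origin, room_id, earliest_events, latest_events, limit
):
    ...

# Parameter list would still overflow, so each parameter gets a line
# (this shape appears later, in the transport client):
def get_public_rooms(
    self,
    remote_server,
    limit,
    since_token,
    search_filter=None,
    include_all_networks=False,
    third_party_instance_id=None,
):
    ...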
@@ -666,22 +643,17 @@ class FederationServer(FederationBase):
# origin. See bug #1893. This is also true for some third party
# invites).
if not (
pdu.type == 'm.room.member' and
pdu.content and
pdu.content.get("membership", None) in (
Membership.JOIN, Membership.INVITE,
)
pdu.type == "m.room.member"
and pdu.content
and pdu.content.get("membership", None)
in (Membership.JOIN, Membership.INVITE)
):
logger.info(
"Discarding PDU %s from invalid origin %s",
pdu.event_id, origin
"Discarding PDU %s from invalid origin %s", pdu.event_id, origin
)
return
else:
logger.info(
"Accepting join PDU %s from %s",
pdu.event_id, origin
)
logger.info("Accepting join PDU %s from %s", pdu.event_id, origin)

# We've already checked that we know the room version by this point
room_version = yield self.store.get_room_version(pdu.room_id)

@@ -690,33 +662,19 @@ class FederationServer(FederationBase):
try:
pdu = yield self._check_sigs_and_hash(room_version, pdu)
except SynapseError as e:
raise FederationError(
"ERROR",
e.code,
e.msg,
affected=pdu.event_id,
)
raise FederationError("ERROR", e.code, e.msg, affected=pdu.event_id)

yield self.handler.on_receive_pdu(
origin, pdu, sent_to_us_directly=True,
)
yield self.handler.on_receive_pdu(origin, pdu, sent_to_us_directly=True)

def __str__(self):
return "<ReplicationLayer(%s)>" % self.server_name

@defer.inlineCallbacks
def exchange_third_party_invite(
self,
sender_user_id,
target_user_id,
room_id,
signed,
self, sender_user_id, target_user_id, room_id, signed
):
ret = yield self.handler.exchange_third_party_invite(
sender_user_id,
target_user_id,
room_id,
signed,
sender_user_id, target_user_id, room_id, signed
)
defer.returnValue(ret)

@@ -771,7 +729,7 @@ def server_matches_acl_event(server_name, acl_event):
allow_ip_literals = True
if not allow_ip_literals:
# check for ipv6 literals. These start with '['.
if server_name[0] == '[':
if server_name[0] == "[":
return False

# check for ipv4 literals. We can just lift the routine from twisted.

@@ -805,7 +763,9 @@ def server_matches_acl_event(server_name, acl_event):

def _acl_entry_matches(server_name, acl_entry):
if not isinstance(acl_entry, six.string_types):
logger.warn("Ignoring non-str ACL entry '%s' (is %s)", acl_entry, type(acl_entry))
logger.warn(
"Ignoring non-str ACL entry '%s' (is %s)", acl_entry, type(acl_entry)
)
return False
regex = glob_to_regex(acl_entry)
return regex.match(server_name)

@@ -815,6 +775,7 @@ class FederationHandlerRegistry(object):
"""Allows classes to register themselves as handlers for a given EDU or
query type for incoming federation traffic.
"""

def __init__(self):
self.edu_handlers = {}
self.query_handlers = {}

@@ -848,9 +809,7 @@ class FederationHandlerRegistry(object):
on and the result used as the response to the query request.
"""
if query_type in self.query_handlers:
raise KeyError(
"Already have a Query handler for %s" % (query_type,)
)
raise KeyError("Already have a Query handler for %s" % (query_type,))

logger.info("Registering federation query handler for %r", query_type)

@@ -905,14 +864,10 @@ class ReplicationFederationHandlerRegistry(FederationHandlerRegistry):
handler = self.edu_handlers.get(edu_type)
if handler:
return super(ReplicationFederationHandlerRegistry, self).on_edu(
edu_type, origin, content,
edu_type, origin, content
)

return self._send_edu(
edu_type=edu_type,
origin=origin,
content=content,
)
return self._send_edu(edu_type=edu_type, origin=origin, content=content)

def on_query(self, query_type, args):
"""Overrides FederationHandlerRegistry

@@ -921,7 +876,4 @@ class ReplicationFederationHandlerRegistry(FederationHandlerRegistry):
if handler:
return handler(args)

return self._get_query_client(
query_type=query_type,
args=args,
)
return self._get_query_client(query_type=query_type, args=args)
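
The membership check above also shows Black's operator placement: long boolean chains are broken before each operator rather than after it, so `and`/`or` starts the continuation line. A self-contained sketch with illustrative values:

pdu_type = "m.room.member"
content = {"membership": "join"}

# Black puts the operator at the start of the continuation line:
is_join_or_invite = (
    pdu_type == "m.room.member"
    and content
    and content.get("membership") in ("join", "invite")
)
print(is_join_or_invite)  # True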
@@ -46,12 +46,9 @@ class TransactionActions(object):
response code and response body.
"""
if not transaction.transaction_id:
raise RuntimeError("Cannot persist a transaction with no "
"transaction_id")
raise RuntimeError("Cannot persist a transaction with no " "transaction_id")

return self.store.get_received_txn_response(
transaction.transaction_id, origin
)
return self.store.get_received_txn_response(transaction.transaction_id, origin)

@log_function
def set_response(self, origin, transaction, code, response):

@@ -61,14 +58,10 @@ class TransactionActions(object):
Deferred
"""
if not transaction.transaction_id:
raise RuntimeError("Cannot persist a transaction with no "
"transaction_id")
raise RuntimeError("Cannot persist a transaction with no " "transaction_id")

return self.store.set_received_txn_response(
transaction.transaction_id,
origin,
code,
response,
transaction.transaction_id, origin, code, response
)

@defer.inlineCallbacks
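
The reflowed RuntimeError above looks odd but is behaviour-preserving: the two adjacent string literals are concatenated at compile time, and Black deliberately leaves implicit concatenation alone when joining lines. A quick check:

# Adjacent literals are one string; the split survives reformatting.
msg = "Cannot persist a transaction with no " "transaction_id"
assert msg == "Cannot persist a transaction with no transaction_id"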
@@ -77,12 +77,22 @@ class FederationRemoteSendQueue(object):
# lambda binds to the queue rather than to the name of the queue which
# changes. ARGH.
def register(name, queue):
LaterGauge("synapse_federation_send_queue_%s_size" % (queue_name,),
"", [], lambda: len(queue))
LaterGauge(
"synapse_federation_send_queue_%s_size" % (queue_name,),
"",
[],
lambda: len(queue),
)

for queue_name in [
"presence_map", "presence_changed", "keyed_edu", "keyed_edu_changed",
"edus", "device_messages", "pos_time", "presence_destinations",
"presence_map",
"presence_changed",
"keyed_edu",
"keyed_edu_changed",
"edus",
"device_messages",
"pos_time",
"presence_destinations",
]:
register(queue_name, getattr(self, queue_name))

@@ -121,9 +131,7 @@ class FederationRemoteSendQueue(object):
del self.presence_changed[key]

user_ids = set(
user_id
for uids in self.presence_changed.values()
for user_id in uids
user_id for uids in self.presence_changed.values() for user_id in uids
)

keys = self.presence_destinations.keys()

@@ -285,19 +293,21 @@ class FederationRemoteSendQueue(object):
]

for (key, user_id) in dest_user_ids:
rows.append((key, PresenceRow(
state=self.presence_map[user_id],
)))
rows.append((key, PresenceRow(state=self.presence_map[user_id])))

# Fetch presence to send to destinations
i = self.presence_destinations.bisect_right(from_token)
j = self.presence_destinations.bisect_right(to_token) + 1

for pos, (user_id, dests) in self.presence_destinations.items()[i:j]:
rows.append((pos, PresenceDestinationsRow(
state=self.presence_map[user_id],
destinations=list(dests),
)))
rows.append(
(
pos,
PresenceDestinationsRow(
state=self.presence_map[user_id], destinations=list(dests)
),
)
)

# Fetch changes keyed edus
i = self.keyed_edu_changed.bisect_right(from_token)

@@ -308,10 +318,14 @@ class FederationRemoteSendQueue(object):
keyed_edus = {v: k for k, v in self.keyed_edu_changed.items()[i:j]}

for ((destination, edu_key), pos) in iteritems(keyed_edus):
rows.append((pos, KeyedEduRow(
key=edu_key,
edu=self.keyed_edu[(destination, edu_key)],
)))
rows.append(
(
pos,
KeyedEduRow(
key=edu_key, edu=self.keyed_edu[(destination, edu_key)]
),
)
)

# Fetch changed edus
i = self.edus.bisect_right(from_token)

@@ -327,9 +341,7 @@ class FederationRemoteSendQueue(object):
device_messages = {v: k for k, v in self.device_messages.items()[i:j]}

for (destination, pos) in iteritems(device_messages):
rows.append((pos, DeviceRow(
destination=destination,
)))
rows.append((pos, DeviceRow(destination=destination)))

# Sort rows based on pos
rows.sort()
@@ -377,16 +389,14 @@ class BaseFederationRow(object):
raise NotImplementedError()


class PresenceRow(BaseFederationRow, namedtuple("PresenceRow", (
"state",  # UserPresenceState
))):
class PresenceRow(
BaseFederationRow, namedtuple("PresenceRow", ("state",))  # UserPresenceState
):
TypeId = "p"

@staticmethod
def from_data(data):
return PresenceRow(
state=UserPresenceState.from_dict(data)
)
return PresenceRow(state=UserPresenceState.from_dict(data))

def to_data(self):
return self.state.as_dict()

@@ -395,33 +405,35 @@ class PresenceRow(BaseFederationRow, namedtuple("PresenceRow", (
buff.presence.append(self.state)


class PresenceDestinationsRow(BaseFederationRow, namedtuple("PresenceDestinationsRow", (
"state",  # UserPresenceState
"destinations",  # list[str]
))):
class PresenceDestinationsRow(
BaseFederationRow,
namedtuple(
"PresenceDestinationsRow",
("state", "destinations"),  # UserPresenceState  # list[str]
),
):
TypeId = "pd"

@staticmethod
def from_data(data):
return PresenceDestinationsRow(
state=UserPresenceState.from_dict(data["state"]),
destinations=data["dests"],
state=UserPresenceState.from_dict(data["state"]), destinations=data["dests"]
)

def to_data(self):
return {
"state": self.state.as_dict(),
"dests": self.destinations,
}
return {"state": self.state.as_dict(), "dests": self.destinations}

def add_to_buffer(self, buff):
buff.presence_destinations.append((self.state, self.destinations))


class KeyedEduRow(BaseFederationRow, namedtuple("KeyedEduRow", (
"key",  # tuple(str) - the edu key passed to send_edu
"edu",  # Edu
))):
class KeyedEduRow(
BaseFederationRow,
namedtuple(
"KeyedEduRow",
("key", "edu"),  # tuple(str) - the edu key passed to send_edu  # Edu
),
):
"""Streams EDUs that have an associated key that is used to clobber. For example,
typing EDUs clobber based on room_id.
"""

@@ -430,28 +442,19 @@ class KeyedEduRow(BaseFederationRow, namedtuple("KeyedEduRow", (

@staticmethod
def from_data(data):
return KeyedEduRow(
key=tuple(data["key"]),
edu=Edu(**data["edu"]),
)
return KeyedEduRow(key=tuple(data["key"]), edu=Edu(**data["edu"]))

def to_data(self):
return {
"key": self.key,
"edu": self.edu.get_internal_dict(),
}
return {"key": self.key, "edu": self.edu.get_internal_dict()}

def add_to_buffer(self, buff):
buff.keyed_edus.setdefault(
self.edu.destination, {}
)[self.key] = self.edu
buff.keyed_edus.setdefault(self.edu.destination, {})[self.key] = self.edu


class EduRow(BaseFederationRow, namedtuple("EduRow", (
"edu",  # Edu
))):
class EduRow(BaseFederationRow, namedtuple("EduRow", ("edu",))):  # Edu
"""Streams EDUs that don't have keys. See KeyedEduRow
"""

TypeId = "e"

@staticmethod
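
One readability cost is visible in the row classes above: when Black collapses a field tuple whose entries each carried a trailing comment, the comments are appended to the surviving line, which is how PresenceDestinationsRow ended up with two fused comments. A minimal reproduction; the definition mirrors the one in this diff:

from collections import namedtuple

# Equivalent, but the per-field comments are now fused onto one line.
# Moving such comments onto their own lines before reformatting avoids this.
PresenceDestinationsRow = namedtuple(
    "PresenceDestinationsRow",
    ("state", "destinations"),  # UserPresenceState  # list[str]
)
row = PresenceDestinationsRow(state=None, destinations=[])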
@@ -465,13 +468,12 @@ class EduRow(BaseFederationRow, namedtuple("EduRow", (
buff.edus.setdefault(self.edu.destination, []).append(self.edu)


class DeviceRow(BaseFederationRow, namedtuple("DeviceRow", (
"destination",  # str
))):
class DeviceRow(BaseFederationRow, namedtuple("DeviceRow", ("destination",))):  # str
"""Streams the fact that either a) there are pending to-device messages for
users on the remote, or b) a local user's device has changed and needs to
be sent to the remote.
"""

TypeId = "d"

@staticmethod

@@ -487,23 +489,20 @@ class DeviceRow(BaseFederationRow, namedtuple("DeviceRow", (

TypeToRow = {
Row.TypeId: Row
for Row in (
PresenceRow,
PresenceDestinationsRow,
KeyedEduRow,
EduRow,
DeviceRow,
)
for Row in (PresenceRow, PresenceDestinationsRow, KeyedEduRow, EduRow, DeviceRow)
}


ParsedFederationStreamData = namedtuple("ParsedFederationStreamData", (
"presence",  # list(UserPresenceState)
"presence_destinations",  # list of tuples of UserPresenceState and destinations
"keyed_edus",  # dict of destination -> { key -> Edu }
"edus",  # dict of destination -> [Edu]
"device_destinations",  # set of destinations
))
ParsedFederationStreamData = namedtuple(
"ParsedFederationStreamData",
(
"presence",  # list(UserPresenceState)
"presence_destinations",  # list of tuples of UserPresenceState and destinations
"keyed_edus",  # dict of destination -> { key -> Edu }
"edus",  # dict of destination -> [Edu]
"device_destinations",  # set of destinations
),
)


def process_rows_for_federation(transaction_queue, rows):

@@ -542,7 +541,7 @@ def process_rows_for_federation(transaction_queue, rows):

for state, destinations in buff.presence_destinations:
transaction_queue.send_presence_to_destinations(
states=[state], destinations=destinations,
states=[state], destinations=destinations
)

for destination, edu_map in iteritems(buff.keyed_edus):
@@ -44,8 +44,8 @@ sent_pdus_destination_dist_count = Counter(
)

sent_pdus_destination_dist_total = Counter(
"synapse_federation_client_sent_pdu_destinations:total", ""
"Total number of PDUs queued for sending across all destinations",
"synapse_federation_client_sent_pdu_destinations:total",
"" "Total number of PDUs queued for sending across all destinations",
)


@@ -63,14 +63,15 @@ class FederationSender(object):
self._transaction_manager = TransactionManager(hs)

# map from destination to PerDestinationQueue
self._per_destination_queues = {} # type: dict[str, PerDestinationQueue]
self._per_destination_queues = {}  # type: dict[str, PerDestinationQueue]

LaterGauge(
"synapse_federation_transaction_queue_pending_destinations",
"",
[],
lambda: sum(
1 for d in self._per_destination_queues.values()
1
for d in self._per_destination_queues.values()
if d.transmission_loop_running
),
)

@@ -108,8 +109,9 @@ class FederationSender(object):
# awaiting a call to flush_read_receipts_for_room. The presence of an entry
# here for a given room means that we are rate-limiting RR flushes to that room,
# and that there is a pending call to _flush_rrs_for_room in the system.
self._queues_awaiting_rr_flush_by_room = {
} # type: dict[str, set[PerDestinationQueue]]
self._queues_awaiting_rr_flush_by_room = (
{}
)  # type: dict[str, set[PerDestinationQueue]]

self._rr_txn_interval_per_room_ms = (
1000.0 / hs.get_config().federation_rr_transactions_per_room_per_second

@@ -141,8 +143,7 @@ class FederationSender(object):

# fire off a processing loop in the background
run_as_background_process(
"process_event_queue_for_federation",
self._process_event_queue_loop,
"process_event_queue_for_federation", self._process_event_queue_loop
)

@defer.inlineCallbacks

@@ -152,7 +153,7 @@ class FederationSender(object):
while True:
last_token = yield self.store.get_federation_out_pos("events")
next_token, events = yield self.store.get_all_new_events_stream(
last_token, self._last_poked_id, limit=100,
last_token, self._last_poked_id, limit=100
)

logger.debug("Handling %s -> %s", last_token, next_token)

@@ -179,7 +180,7 @@ class FederationSender(object):
# banned then it won't receive the event because it won't
# be in the room after the ban.
destinations = yield self.state.get_current_hosts_in_room(
event.room_id, latest_event_ids=event.prev_event_ids(),
event.room_id, latest_event_ids=event.prev_event_ids()
)
except Exception:
logger.exception(
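
The Counter hunk above makes a pre-existing quirk visible rather than introducing one: the metric's documentation string was written as an empty literal implicitly concatenated with the real text, and the reflow simply puts both literals on one line. The empty literal contributes nothing at runtime:

doc = "" "Total number of PDUs queued for sending across all destinations"
assert doc == "Total number of PDUs queued for sending across all destinations"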
@@ -209,37 +210,40 @@ class FederationSender(object):
for event in events:
events_by_room.setdefault(event.room_id, []).append(event)

yield logcontext.make_deferred_yieldable(defer.gatherResults(
[
logcontext.run_in_background(handle_room_events, evs)
for evs in itervalues(events_by_room)
],
consumeErrors=True
))

yield self.store.update_federation_out_pos(
"events", next_token
yield logcontext.make_deferred_yieldable(
defer.gatherResults(
[
logcontext.run_in_background(handle_room_events, evs)
for evs in itervalues(events_by_room)
],
consumeErrors=True,
)
)

yield self.store.update_federation_out_pos("events", next_token)

if events:
now = self.clock.time_msec()
ts = yield self.store.get_received_ts(events[-1].event_id)

synapse.metrics.event_processing_lag.labels(
"federation_sender").set(now - ts)
"federation_sender"
).set(now - ts)
synapse.metrics.event_processing_last_ts.labels(
"federation_sender").set(ts)
"federation_sender"
).set(ts)

events_processed_counter.inc(len(events))

event_processing_loop_room_count.labels(
"federation_sender"
).inc(len(events_by_room))
event_processing_loop_room_count.labels("federation_sender").inc(
len(events_by_room)
)

event_processing_loop_counter.labels("federation_sender").inc()

synapse.metrics.event_processing_positions.labels(
"federation_sender").set(next_token)
"federation_sender"
).set(next_token)

finally:
self._is_processing = False

@@ -312,9 +316,7 @@ class FederationSender(object):
if not domains:
return

queues_pending_flush = self._queues_awaiting_rr_flush_by_room.get(
room_id
)
queues_pending_flush = self._queues_awaiting_rr_flush_by_room.get(room_id)

# if there is no flush yet scheduled, we will send out these receipts with
# immediate flushes, and schedule the next flush for this room.

@@ -377,10 +379,9 @@ class FederationSender(object):
# updates in quick succession are correctly handled.
# We only want to send presence for our own users, so lets always just
# filter here just in case.
self.pending_presence.update({
state.user_id: state for state in states
if self.is_mine_id(state.user_id)
})
self.pending_presence.update(
{state.user_id: state for state in states if self.is_mine_id(state.user_id)}
)

# We then handle the new pending presence in batches, first figuring
# out the destinations we need to send each state to and then poking it
@@ -360,7 +360,7 @@ class PerDestinationQueue(object):

# Retrieve list of new device updates to send to the destination
now_stream_id, results = yield self._store.get_devices_by_remote(
self._destination, last_device_list, limit=limit,
self._destination, last_device_list, limit=limit
)
edus = [
Edu(

@@ -381,10 +381,7 @@ class PerDestinationQueue(object):
last_device_stream_id = self._last_device_stream_id
to_device_stream_id = self._store.get_to_device_stream_token()
contents, stream_id = yield self._store.get_new_device_msgs_for_remote(
self._destination,
last_device_stream_id,
to_device_stream_id,
limit,
self._destination, last_device_stream_id, to_device_stream_id, limit
)
edus = [
Edu(
@@ -29,9 +29,10 @@ class TransactionManager(object):

shared between PerDestinationQueue objects
"""

def __init__(self, hs):
self._server_name = hs.hostname
self.clock = hs.get_clock() # nb must be called this for @measure_func
self.clock = hs.get_clock()  # nb must be called this for @measure_func
self._store = hs.get_datastore()
self._transaction_actions = TransactionActions(self._store)
self._transport_layer = hs.get_federation_transport_client()

@@ -55,9 +56,9 @@ class TransactionManager(object):
txn_id = str(self._next_txn_id)

logger.debug(
"TX [%s] {%s} Attempting new transaction"
" (pdus: %d, edus: %d)",
destination, txn_id,
"TX [%s] {%s} Attempting new transaction" " (pdus: %d, edus: %d)",
destination,
txn_id,
len(pdus),
len(edus),
)

@@ -79,9 +80,9 @@ class TransactionManager(object):

logger.debug("TX [%s] Persisted transaction", destination)
logger.info(
"TX [%s] {%s} Sending transaction [%s],"
" (PDUs: %d, EDUs: %d)",
destination, txn_id,
"TX [%s] {%s} Sending transaction [%s]," " (PDUs: %d, EDUs: %d)",
destination,
txn_id,
transaction.transaction_id,
len(pdus),
len(edus),

@@ -112,20 +113,12 @@ class TransactionManager(object):
response = e.response

if e.code in (401, 404, 429) or 500 <= e.code:
logger.info(
"TX [%s] {%s} got %d response",
destination, txn_id, code
)
logger.info("TX [%s] {%s} got %d response", destination, txn_id, code)
raise e

logger.info(
"TX [%s] {%s} got %d response",
destination, txn_id, code
)
logger.info("TX [%s] {%s} got %d response", destination, txn_id, code)

yield self._transaction_actions.delivered(
transaction, code, response
)
yield self._transaction_actions.delivered(transaction, code, response)

logger.debug("TX [%s] {%s} Marked as delivered", destination, txn_id)

@@ -134,13 +127,18 @@ class TransactionManager(object):
if "error" in r:
logger.warn(
"TX [%s] {%s} Remote returned error for %s: %s",
destination, txn_id, e_id, r,
destination,
txn_id,
e_id,
r,
)
else:
for p in pdus:
logger.warn(
"TX [%s] {%s} Failed to send event %s",
destination, txn_id, p.event_id,
destination,
txn_id,
p.event_id,
)
success = False
@@ -48,12 +48,13 @@ class TransportLayerClient(object):
Returns:
Deferred: Results in a dict received from the remote homeserver.
"""
logger.debug("get_room_state dest=%s, room=%s",
destination, room_id)
logger.debug("get_room_state dest=%s, room=%s", destination, room_id)

path = _create_v1_path("/state/%s", room_id)
return self.client.get_json(
destination, path=path, args={"event_id": event_id},
destination,
path=path,
args={"event_id": event_id},
try_trailing_slash_on_400=True,
)

@@ -71,12 +72,13 @@ class TransportLayerClient(object):
Returns:
Deferred: Results in a dict received from the remote homeserver.
"""
logger.debug("get_room_state_ids dest=%s, room=%s",
destination, room_id)
logger.debug("get_room_state_ids dest=%s, room=%s", destination, room_id)

path = _create_v1_path("/state_ids/%s", room_id)
return self.client.get_json(
destination, path=path, args={"event_id": event_id},
destination,
path=path,
args={"event_id": event_id},
try_trailing_slash_on_400=True,
)

@@ -94,13 +96,11 @@ class TransportLayerClient(object):
Returns:
Deferred: Results in a dict received from the remote homeserver.
"""
logger.debug("get_pdu dest=%s, event_id=%s",
destination, event_id)
logger.debug("get_pdu dest=%s, event_id=%s", destination, event_id)

path = _create_v1_path("/event/%s", event_id)
return self.client.get_json(
destination, path=path, timeout=timeout,
try_trailing_slash_on_400=True,
destination, path=path, timeout=timeout, try_trailing_slash_on_400=True
)

@log_function

@@ -119,7 +119,10 @@ class TransportLayerClient(object):
"""
logger.debug(
"backfill dest=%s, room_id=%s, event_tuples=%s, limit=%s",
destination, room_id, repr(event_tuples), str(limit)
destination,
room_id,
repr(event_tuples),
str(limit),
)

if not event_tuples:

@@ -128,16 +131,10 @@ class TransportLayerClient(object):

path = _create_v1_path("/backfill/%s", room_id)

args = {
"v": event_tuples,
"limit": [str(limit)],
}
args = {"v": event_tuples, "limit": [str(limit)]}

return self.client.get_json(
destination,
path=path,
args=args,
try_trailing_slash_on_400=True,
destination, path=path, args=args, try_trailing_slash_on_400=True
)

@defer.inlineCallbacks

@@ -163,7 +160,8 @@ class TransportLayerClient(object):
"""
logger.debug(
"send_data dest=%s, txid=%s",
transaction.destination, transaction.transaction_id
transaction.destination,
transaction.transaction_id,
)

if transaction.destination == self.server_name:

@@ -189,8 +187,9 @@ class TransportLayerClient(object):

@defer.inlineCallbacks
@log_function
def make_query(self, destination, query_type, args, retry_on_dns_fail,
ignore_backoff=False):
def make_query(
self, destination, query_type, args, retry_on_dns_fail, ignore_backoff=False
):
path = _create_v1_path("/query/%s", query_type)

content = yield self.client.get_json(
@@ -235,8 +234,8 @@ class TransportLayerClient(object):
valid_memberships = {Membership.JOIN, Membership.LEAVE}
if membership not in valid_memberships:
raise RuntimeError(
"make_membership_event called with membership='%s', must be one of %s" %
(membership, ",".join(valid_memberships))
"make_membership_event called with membership='%s', must be one of %s"
% (membership, ",".join(valid_memberships))
)
path = _create_v1_path("/make_%s/%s/%s", membership, room_id, user_id)

@@ -268,9 +267,7 @@ class TransportLayerClient(object):
path = _create_v1_path("/send_join/%s/%s", room_id, event_id)

response = yield self.client.put_json(
destination=destination,
path=path,
data=content,
destination=destination, path=path, data=content
)

defer.returnValue(response)

@@ -284,7 +281,6 @@ class TransportLayerClient(object):
destination=destination,
path=path,
data=content,

# we want to do our best to send this through. The problem is
# that if it fails, we won't retry it later, so if the remote
# server was just having a momentary blip, the room will be out of

@@ -300,10 +296,7 @@ class TransportLayerClient(object):
path = _create_v1_path("/invite/%s/%s", room_id, event_id)

response = yield self.client.put_json(
destination=destination,
path=path,
data=content,
ignore_backoff=True,
destination=destination, path=path, data=content, ignore_backoff=True
)

defer.returnValue(response)

@@ -314,26 +307,27 @@ class TransportLayerClient(object):
path = _create_v2_path("/invite/%s/%s", room_id, event_id)

response = yield self.client.put_json(
destination=destination,
path=path,
data=content,
ignore_backoff=True,
destination=destination, path=path, data=content, ignore_backoff=True
)

defer.returnValue(response)

@defer.inlineCallbacks
@log_function
def get_public_rooms(self, remote_server, limit, since_token,
search_filter=None, include_all_networks=False,
third_party_instance_id=None):
def get_public_rooms(
self,
remote_server,
limit,
since_token,
search_filter=None,
include_all_networks=False,
third_party_instance_id=None,
):
path = _create_v1_path("/publicRooms")

args = {
"include_all_networks": "true" if include_all_networks else "false",
}
args = {"include_all_networks": "true" if include_all_networks else "false"}
if third_party_instance_id:
args["third_party_instance_id"] = third_party_instance_id,
args["third_party_instance_id"] = (third_party_instance_id,)
if limit:
args["limit"] = [str(limit)]
if since_token:

@@ -342,10 +336,7 @@ class TransportLayerClient(object):
# TODO(erikj): Actually send the search_filter across federation.

response = yield self.client.get_json(
destination=remote_server,
path=path,
args=args,
ignore_backoff=True,
destination=remote_server, path=path, args=args, ignore_backoff=True
)

defer.returnValue(response)

@@ -353,12 +344,10 @@ class TransportLayerClient(object):
@defer.inlineCallbacks
@log_function
def exchange_third_party_invite(self, destination, room_id, event_dict):
path = _create_v1_path("/exchange_third_party_invite/%s", room_id,)
path = _create_v1_path("/exchange_third_party_invite/%s", room_id)

response = yield self.client.put_json(
destination=destination,
path=path,
data=event_dict,
destination=destination, path=path, data=event_dict
)

defer.returnValue(response)

@@ -368,10 +357,7 @@ class TransportLayerClient(object):
def get_event_auth(self, destination, room_id, event_id):
path = _create_v1_path("/event_auth/%s/%s", room_id, event_id)

content = yield self.client.get_json(
destination=destination,
path=path,
)
content = yield self.client.get_json(destination=destination, path=path)

defer.returnValue(content)

@@ -381,9 +367,7 @@ class TransportLayerClient(object):
path = _create_v1_path("/query_auth/%s/%s", room_id, event_id)

content = yield self.client.post_json(
destination=destination,
path=path,
data=content,
destination=destination, path=path, data=content
)

defer.returnValue(content)

@@ -416,10 +400,7 @@ class TransportLayerClient(object):
path = _create_v1_path("/user/keys/query")

content = yield self.client.post_json(
destination=destination,
path=path,
data=query_content,
timeout=timeout,
destination=destination, path=path, data=query_content, timeout=timeout
)
defer.returnValue(content)

@@ -443,9 +424,7 @@ class TransportLayerClient(object):
path = _create_v1_path("/user/devices/%s", user_id)

content = yield self.client.get_json(
destination=destination,
path=path,
timeout=timeout,
destination=destination, path=path, timeout=timeout
)
defer.returnValue(content)

@@ -479,18 +458,23 @@ class TransportLayerClient(object):
path = _create_v1_path("/user/keys/claim")

content = yield self.client.post_json(
destination=destination,
path=path,
data=query_content,
timeout=timeout,
destination=destination, path=path, data=query_content, timeout=timeout
)
defer.returnValue(content)

@defer.inlineCallbacks
@log_function
def get_missing_events(self, destination, room_id, earliest_events,
latest_events, limit, min_depth, timeout):
path = _create_v1_path("/get_missing_events/%s", room_id,)
def get_missing_events(
self,
destination,
room_id,
earliest_events,
latest_events,
limit,
min_depth,
timeout,
):
path = _create_v1_path("/get_missing_events/%s", room_id)

content = yield self.client.post_json(
destination=destination,
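
The get_public_rooms hunk above is another case of Black surfacing, rather than changing, existing behaviour: the old assignment ended with a stray comma, so the stored value was always a one-element tuple, and the new parentheses just make that explicit. With illustrative values:

third_party_instance_id = "example-network"
args = {}

# A trailing comma on the right-hand side creates a tuple; Black only
# adds the parentheses that make the (probably unintended) tuple visible.
args["third_party_instance_id"] = (third_party_instance_id,)
assert args["third_party_instance_id"] == ("example-network",)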
@@ -510,7 +494,7 @@ class TransportLayerClient(object):
def get_group_profile(self, destination, group_id, requester_user_id):
"""Get a group profile
"""
path = _create_v1_path("/groups/%s/profile", group_id,)
path = _create_v1_path("/groups/%s/profile", group_id)

return self.client.get_json(
destination=destination,

@@ -529,7 +513,7 @@ class TransportLayerClient(object):
requester_user_id (str)
content (dict): The new profile of the group
"""
path = _create_v1_path("/groups/%s/profile", group_id,)
path = _create_v1_path("/groups/%s/profile", group_id)

return self.client.post_json(
destination=destination,

@@ -543,7 +527,7 @@ class TransportLayerClient(object):
def get_group_summary(self, destination, group_id, requester_user_id):
"""Get a group summary
"""
path = _create_v1_path("/groups/%s/summary", group_id,)
path = _create_v1_path("/groups/%s/summary", group_id)

return self.client.get_json(
destination=destination,

@@ -556,7 +540,7 @@ class TransportLayerClient(object):
def get_rooms_in_group(self, destination, group_id, requester_user_id):
"""Get all rooms in a group
"""
path = _create_v1_path("/groups/%s/rooms", group_id,)
path = _create_v1_path("/groups/%s/rooms", group_id)

return self.client.get_json(
destination=destination,

@@ -565,11 +549,12 @@ class TransportLayerClient(object):
ignore_backoff=True,
)

def add_room_to_group(self, destination, group_id, requester_user_id, room_id,
content):
def add_room_to_group(
self, destination, group_id, requester_user_id, room_id, content
):
"""Add a room to a group
"""
path = _create_v1_path("/groups/%s/room/%s", group_id, room_id,)
path = _create_v1_path("/groups/%s/room/%s", group_id, room_id)

return self.client.post_json(
destination=destination,

@@ -579,13 +564,13 @@ class TransportLayerClient(object):
ignore_backoff=True,
)

def update_room_in_group(self, destination, group_id, requester_user_id, room_id,
config_key, content):
def update_room_in_group(
self, destination, group_id, requester_user_id, room_id, config_key, content
):
"""Update room in group
"""
path = _create_v1_path(
"/groups/%s/room/%s/config/%s",
group_id, room_id, config_key,
"/groups/%s/room/%s/config/%s", group_id, room_id, config_key
)

return self.client.post_json(

@@ -599,7 +584,7 @@ class TransportLayerClient(object):
def remove_room_from_group(self, destination, group_id, requester_user_id, room_id):
"""Remove a room from a group
"""
path = _create_v1_path("/groups/%s/room/%s", group_id, room_id,)
path = _create_v1_path("/groups/%s/room/%s", group_id, room_id)

return self.client.delete_json(
destination=destination,

@@ -612,7 +597,7 @@ class TransportLayerClient(object):
def get_users_in_group(self, destination, group_id, requester_user_id):
"""Get users in a group
"""
path = _create_v1_path("/groups/%s/users", group_id,)
path = _create_v1_path("/groups/%s/users", group_id)

return self.client.get_json(
destination=destination,

@@ -625,7 +610,7 @@ class TransportLayerClient(object):
def get_invited_users_in_group(self, destination, group_id, requester_user_id):
"""Get users that have been invited to a group
"""
path = _create_v1_path("/groups/%s/invited_users", group_id,)
path = _create_v1_path("/groups/%s/invited_users", group_id)

return self.client.get_json(
destination=destination,

@@ -638,16 +623,10 @@ class TransportLayerClient(object):
def accept_group_invite(self, destination, group_id, user_id, content):
"""Accept a group invite
"""
path = _create_v1_path(
"/groups/%s/users/%s/accept_invite",
group_id, user_id,
)
path = _create_v1_path("/groups/%s/users/%s/accept_invite", group_id, user_id)

return self.client.post_json(
destination=destination,
path=path,
data=content,
ignore_backoff=True,
destination=destination, path=path, data=content, ignore_backoff=True
)

@log_function

@@ -657,14 +636,13 @@ class TransportLayerClient(object):
path = _create_v1_path("/groups/%s/users/%s/join", group_id, user_id)

return self.client.post_json(
destination=destination,
path=path,
data=content,
ignore_backoff=True,
destination=destination, path=path, data=content, ignore_backoff=True
)

@log_function
def invite_to_group(self, destination, group_id, user_id, requester_user_id, content):
def invite_to_group(
self, destination, group_id, user_id, requester_user_id, content
):
"""Invite a user to a group
"""
path = _create_v1_path("/groups/%s/users/%s/invite", group_id, user_id)

@@ -686,15 +664,13 @@ class TransportLayerClient(object):
path = _create_v1_path("/groups/local/%s/users/%s/invite", group_id, user_id)

return self.client.post_json(
destination=destination,
path=path,
data=content,
ignore_backoff=True,
destination=destination, path=path, data=content, ignore_backoff=True
)

@log_function
def remove_user_from_group(self, destination, group_id, requester_user_id,
user_id, content):
def remove_user_from_group(
self, destination, group_id, requester_user_id, user_id, content
):
"""Remove a user from a group
"""
path = _create_v1_path("/groups/%s/users/%s/remove", group_id, user_id)

@@ -708,8 +684,9 @@ class TransportLayerClient(object):
)

@log_function
def remove_user_from_group_notification(self, destination, group_id, user_id,
content):
def remove_user_from_group_notification(
self, destination, group_id, user_id, content
):
"""Sent by group server to inform a user's server that they have been
kicked from the group.
"""

@@ -717,10 +694,7 @@ class TransportLayerClient(object):
path = _create_v1_path("/groups/local/%s/users/%s/remove", group_id, user_id)

return self.client.post_json(
destination=destination,
path=path,
data=content,
ignore_backoff=True,
destination=destination, path=path, data=content, ignore_backoff=True
)

@log_function
@@ -732,24 +706,24 @@ class TransportLayerClient(object):
path = _create_v1_path("/groups/%s/renew_attestation/%s", group_id, user_id)

return self.client.post_json(
destination=destination,
path=path,
data=content,
ignore_backoff=True,
destination=destination, path=path, data=content, ignore_backoff=True
)

@log_function
def update_group_summary_room(self, destination, group_id, user_id, room_id,
category_id, content):
def update_group_summary_room(
self, destination, group_id, user_id, room_id, category_id, content
):
"""Update a room entry in a group summary
"""
if category_id:
path = _create_v1_path(
"/groups/%s/summary/categories/%s/rooms/%s",
group_id, category_id, room_id,
group_id,
category_id,
room_id,
)
else:
path = _create_v1_path("/groups/%s/summary/rooms/%s", group_id, room_id,)
path = _create_v1_path("/groups/%s/summary/rooms/%s", group_id, room_id)

return self.client.post_json(
destination=destination,

@@ -760,17 +734,20 @@ class TransportLayerClient(object):
)

@log_function
def delete_group_summary_room(self, destination, group_id, user_id, room_id,
category_id):
def delete_group_summary_room(
self, destination, group_id, user_id, room_id, category_id
):
"""Delete a room entry in a group summary
"""
if category_id:
path = _create_v1_path(
"/groups/%s/summary/categories/%s/rooms/%s",
group_id, category_id, room_id,
group_id,
category_id,
room_id,
)
else:
path = _create_v1_path("/groups/%s/summary/rooms/%s", group_id, room_id,)
path = _create_v1_path("/groups/%s/summary/rooms/%s", group_id, room_id)

return self.client.delete_json(
destination=destination,

@@ -783,7 +760,7 @@ class TransportLayerClient(object):
def get_group_categories(self, destination, group_id, requester_user_id):
"""Get all categories in a group
"""
path = _create_v1_path("/groups/%s/categories", group_id,)
path = _create_v1_path("/groups/%s/categories", group_id)

return self.client.get_json(
destination=destination,

@@ -796,7 +773,7 @@ class TransportLayerClient(object):
def get_group_category(self, destination, group_id, requester_user_id, category_id):
"""Get category info in a group
"""
path = _create_v1_path("/groups/%s/categories/%s", group_id, category_id,)
path = _create_v1_path("/groups/%s/categories/%s", group_id, category_id)

return self.client.get_json(
destination=destination,

@@ -806,11 +783,12 @@ class TransportLayerClient(object):
)

@log_function
def update_group_category(self, destination, group_id, requester_user_id, category_id,
content):
def update_group_category(
self, destination, group_id, requester_user_id, category_id, content
):
"""Update a category in a group
"""
path = _create_v1_path("/groups/%s/categories/%s", group_id, category_id,)
path = _create_v1_path("/groups/%s/categories/%s", group_id, category_id)

return self.client.post_json(
destination=destination,

@@ -821,11 +799,12 @@ class TransportLayerClient(object):
)

@log_function
def delete_group_category(self, destination, group_id, requester_user_id,
category_id):
def delete_group_category(
self, destination, group_id, requester_user_id, category_id
):
"""Delete a category in a group
"""
path = _create_v1_path("/groups/%s/categories/%s", group_id, category_id,)
path = _create_v1_path("/groups/%s/categories/%s", group_id, category_id)

return self.client.delete_json(
destination=destination,

@@ -838,7 +817,7 @@ class TransportLayerClient(object):
def get_group_roles(self, destination, group_id, requester_user_id):
"""Get all roles in a group
"""
path = _create_v1_path("/groups/%s/roles", group_id,)
path = _create_v1_path("/groups/%s/roles", group_id)

return self.client.get_json(
destination=destination,

@@ -851,7 +830,7 @@ class TransportLayerClient(object):
def get_group_role(self, destination, group_id, requester_user_id, role_id):
"""Get a role's info
"""
path = _create_v1_path("/groups/%s/roles/%s", group_id, role_id,)
path = _create_v1_path("/groups/%s/roles/%s", group_id, role_id)

return self.client.get_json(
destination=destination,

@@ -861,11 +840,12 @@ class TransportLayerClient(object):
)

@log_function
def update_group_role(self, destination, group_id, requester_user_id, role_id,
content):
def update_group_role(
self, destination, group_id, requester_user_id, role_id, content
):
"""Update a role in a group
"""
path = _create_v1_path("/groups/%s/roles/%s", group_id, role_id,)
path = _create_v1_path("/groups/%s/roles/%s", group_id, role_id)

return self.client.post_json(
destination=destination,

@@ -879,7 +859,7 @@ class TransportLayerClient(object):
def delete_group_role(self, destination, group_id, requester_user_id, role_id):
"""Delete a role in a group
"""
path = _create_v1_path("/groups/%s/roles/%s", group_id, role_id,)
path = _create_v1_path("/groups/%s/roles/%s", group_id, role_id)

return self.client.delete_json(
destination=destination,

@@ -889,17 +869,17 @@ class TransportLayerClient(object):
)

@log_function
def update_group_summary_user(self, destination, group_id, requester_user_id,
user_id, role_id, content):
def update_group_summary_user(
self, destination, group_id, requester_user_id, user_id, role_id, content
):
"""Update a user's entry in a group
"""
if role_id:
path = _create_v1_path(
"/groups/%s/summary/roles/%s/users/%s",
group_id, role_id, user_id,
"/groups/%s/summary/roles/%s/users/%s", group_id, role_id, user_id
)
else:
path = _create_v1_path("/groups/%s/summary/users/%s", group_id, user_id,)
path = _create_v1_path("/groups/%s/summary/users/%s", group_id, user_id)

return self.client.post_json(
destination=destination,

@@ -910,11 +890,10 @@ class TransportLayerClient(object):
)

@log_function
def set_group_join_policy(self, destination, group_id, requester_user_id,
content):
def set_group_join_policy(self, destination, group_id, requester_user_id, content):
"""Sets the join policy for a group
"""
path = _create_v1_path("/groups/%s/settings/m.join_policy", group_id,)
path = _create_v1_path("/groups/%s/settings/m.join_policy", group_id)

return self.client.put_json(
destination=destination,

@@ -925,17 +904,17 @@ class TransportLayerClient(object):
)

@log_function
def delete_group_summary_user(self, destination, group_id, requester_user_id,
user_id, role_id):
def delete_group_summary_user(
self, destination, group_id, requester_user_id, user_id, role_id
):
"""Delete a user's entry in a group
"""
if role_id:
path = _create_v1_path(
"/groups/%s/summary/roles/%s/users/%s",
group_id, role_id, user_id,
"/groups/%s/summary/roles/%s/users/%s", group_id, role_id, user_id
)
else:
path = _create_v1_path("/groups/%s/summary/users/%s", group_id, user_id,)
path = _create_v1_path("/groups/%s/summary/users/%s", group_id, user_id)

return self.client.delete_json(
destination=destination,

@@ -953,10 +932,7 @@ class TransportLayerClient(object):
content = {"user_ids": user_ids}

return self.client.post_json(
destination=destination,
path=path,
data=content,
ignore_backoff=True,
destination=destination, path=path, data=content, ignore_backoff=True
)
|
@ -975,9 +951,8 @@ def _create_v1_path(path, *args):
|
|||
Returns:
|
||||
str
|
||||
"""
|
||||
return (
|
||||
FEDERATION_V1_PREFIX
|
||||
+ path % tuple(urllib.parse.quote(arg, "") for arg in args)
|
||||
return FEDERATION_V1_PREFIX + path % tuple(
|
||||
urllib.parse.quote(arg, "") for arg in args
|
||||
)
|
||||
|
||||
|
||||
|
@ -996,7 +971,6 @@ def _create_v2_path(path, *args):
|
|||
Returns:
|
||||
str
|
||||
"""
|
||||
return (
|
||||
FEDERATION_V2_PREFIX
|
||||
+ path % tuple(urllib.parse.quote(arg, "") for arg in args)
|
||||
return FEDERATION_V2_PREFIX + path % tuple(
|
||||
urllib.parse.quote(arg, "") for arg in args
|
||||
)
|
||||
|
|
|
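
Collapsing the return in _create_v1_path (and _create_v2_path) is safe because % binds more tightly than +: the prefix is concatenated to the already-formatted path either way. A self-contained sketch; the prefix value here is an assumption for illustration, not taken from this diff:

import urllib.parse

FEDERATION_V1_PREFIX = "/_matrix/federation/v1"

def _create_v1_path(path, *args):
    # % is evaluated before +, so parentheses around the sum are optional.
    return FEDERATION_V1_PREFIX + path % tuple(
        urllib.parse.quote(arg, "") for arg in args
    )

assert _create_v1_path("/event/%s", "$abc:example.org") == (
    "/_matrix/federation/v1/event/%24abc%3Aexample.org"
)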
@@ -66,8 +66,7 @@ class TransportLayerServer(JsonResource):

self.authenticator = Authenticator(hs)
self.ratelimiter = FederationRateLimiter(
self.clock,
config=hs.config.rc_federation,
self.clock, config=hs.config.rc_federation
)

self.register_servlets()

@@ -84,11 +83,13 @@ class TransportLayerServer(JsonResource):

class AuthenticationError(SynapseError):
"""There was a problem authenticating the request"""

pass


class NoAuthenticationError(AuthenticationError):
"""The request had no authentication information"""

pass


@@ -105,8 +106,8 @@ class Authenticator(object):
def authenticate_request(self, request, content):
now = self._clock.time_msec()
json_request = {
"method": request.method.decode('ascii'),
"uri": request.uri.decode('ascii'),
"method": request.method.decode("ascii"),
"uri": request.uri.decode("ascii"),
"destination": self.server_name,
"signatures": {},
}

@@ -120,7 +121,7 @@ class Authenticator(object):

if not auth_headers:
raise NoAuthenticationError(
401, "Missing Authorization headers", Codes.UNAUTHORIZED,
401, "Missing Authorization headers", Codes.UNAUTHORIZED
)

for auth in auth_headers:

@@ -130,14 +131,14 @@ class Authenticator(object):
json_request["signatures"].setdefault(origin, {})[key] = sig

if (
self.federation_domain_whitelist is not None and
origin not in self.federation_domain_whitelist
self.federation_domain_whitelist is not None
and origin not in self.federation_domain_whitelist
):
raise FederationDeniedError(origin)

if not json_request["signatures"]:
raise NoAuthenticationError(
401, "Missing Authorization headers", Codes.UNAUTHORIZED,
401, "Missing Authorization headers", Codes.UNAUTHORIZED
)

yield self.keyring.verify_json_for_server(

@@ -177,12 +178,12 @@ def _parse_auth_header(header_bytes):
AuthenticationError if the header could not be parsed
"""
try:
header_str = header_bytes.decode('utf-8')
header_str = header_bytes.decode("utf-8")
params = header_str.split(" ")[1].split(",")
param_dict = dict(kv.split("=") for kv in params)

def strip_quotes(value):
if value.startswith("\""):
if value.startswith('"'):
return value[1:-1]
else:
return value

@@ -198,11 +199,11 @@ def _parse_auth_header(header_bytes):
except Exception as e:
logger.warn(
"Error parsing auth header '%s': %s",
header_bytes.decode('ascii', 'replace'),
header_bytes.decode("ascii", "replace"),
e,
)
raise AuthenticationError(
400, "Malformed Authorization header", Codes.UNAUTHORIZED,
400, "Malformed Authorization header", Codes.UNAUTHORIZED
)
|
@ -242,6 +243,7 @@ class BaseFederationServlet(object):
|
|||
Exception: other exceptions will be caught, logged, and a 500 will be
|
||||
returned.
|
||||
"""
|
||||
|
||||
REQUIRE_AUTH = True
|
||||
|
||||
PREFIX = FEDERATION_V1_PREFIX # Allows specifying the API version
|
||||
|
@ -293,9 +295,7 @@ class BaseFederationServlet(object):
|
|||
origin, content, request.args, *args, **kwargs
|
||||
)
|
||||
else:
|
||||
response = yield func(
|
||||
origin, content, request.args, *args, **kwargs
|
||||
)
|
||||
response = yield func(origin, content, request.args, *args, **kwargs)
|
||||
|
||||
defer.returnValue(response)
|
||||
|
||||
|
@ -343,14 +343,12 @@ class FederationSendServlet(BaseFederationServlet):
|
|||
try:
|
||||
transaction_data = content
|
||||
|
||||
logger.debug(
|
||||
"Decoded %s: %s",
|
||||
transaction_id, str(transaction_data)
|
||||
)
|
||||
logger.debug("Decoded %s: %s", transaction_id, str(transaction_data))
|
||||
|
||||
logger.info(
|
||||
"Received txn %s from %s. (PDUs: %d, EDUs: %d)",
|
||||
transaction_id, origin,
|
||||
transaction_id,
|
||||
origin,
|
||||
len(transaction_data.get("pdus", [])),
|
||||
len(transaction_data.get("edus", [])),
|
||||
)
|
||||
|
@ -361,8 +359,7 @@ class FederationSendServlet(BaseFederationServlet):
|
|||
# Add some extra data to the transaction dict that isn't included
|
||||
# in the request body.
|
||||
transaction_data.update(
|
||||
transaction_id=transaction_id,
|
||||
destination=self.server_name
|
||||
transaction_id=transaction_id, destination=self.server_name
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
|
@ -372,7 +369,7 @@ class FederationSendServlet(BaseFederationServlet):
|
|||
|
||||
try:
|
||||
code, response = yield self.handler.on_incoming_transaction(
|
||||
origin, transaction_data,
|
||||
origin, transaction_data
|
||||
)
|
||||
except Exception:
|
||||
logger.exception("on_incoming_transaction failed")
|
||||
|
@ -416,7 +413,7 @@ class FederationBackfillServlet(BaseFederationServlet):
|
|||
PATH = "/backfill/(?P<context>[^/]*)/?"
|
||||
|
||||
def on_GET(self, origin, content, query, context):
|
||||
versions = [x.decode('ascii') for x in query[b"v"]]
|
||||
versions = [x.decode("ascii") for x in query[b"v"]]
|
||||
limit = parse_integer_from_args(query, "limit", None)
|
||||
|
||||
if not limit:
|
||||
|
@ -432,7 +429,7 @@ class FederationQueryServlet(BaseFederationServlet):
|
|||
def on_GET(self, origin, content, query, query_type):
|
||||
return self.handler.on_query_request(
|
||||
query_type,
|
||||
{k.decode('utf8'): v[0].decode("utf-8") for k, v in query.items()}
|
||||
{k.decode("utf8"): v[0].decode("utf-8") for k, v in query.items()},
|
||||
)
|
||||
|
||||
|
||||
|
@@ -456,15 +453,14 @@ class FederationMakeJoinServlet(BaseFederationServlet):
Deferred[(int, object)|None]: either (response code, response object) to
return a JSON response, or None if the request has already been handled.
"""
versions = query.get(b'ver')
versions = query.get(b"ver")
if versions is not None:
supported_versions = [v.decode("utf-8") for v in versions]
else:
supported_versions = ["1"]

content = yield self.handler.on_make_join_request(
origin, context, user_id,
supported_versions=supported_versions,
origin, context, user_id, supported_versions=supported_versions
)
defer.returnValue((200, content))

@@ -474,9 +470,7 @@ class FederationMakeLeaveServlet(BaseFederationServlet):

@defer.inlineCallbacks
def on_GET(self, origin, content, query, context, user_id):
content = yield self.handler.on_make_leave_request(
origin, context, user_id,
)
content = yield self.handler.on_make_leave_request(origin, context, user_id)
defer.returnValue((200, content))

@@ -517,7 +511,7 @@ class FederationV1InviteServlet(BaseFederationServlet):
# state resolution algorithm, and we don't use that for processing
# invites
content = yield self.handler.on_invite_request(
origin, content, room_version=RoomVersions.V1.identifier,
origin, content, room_version=RoomVersions.V1.identifier
)

# V1 federation API is defined to return a content of `[200, {...}]`

@@ -545,7 +539,7 @@ class FederationV2InviteServlet(BaseFederationServlet):
event.setdefault("unsigned", {})["invite_room_state"] = invite_room_state

content = yield self.handler.on_invite_request(
origin, event, room_version=room_version,
origin, event, room_version=room_version
)
defer.returnValue((200, content))

@@ -629,8 +623,10 @@ class On3pidBindServlet(BaseFederationServlet):
for invite in content["invites"]:
try:
if "signed" not in invite or "token" not in invite["signed"]:
message = ("Rejecting received notification of third-"
"party invite without signed: %s" % (invite,))
message = (
"Rejecting received notification of third-"
"party invite without signed: %s" % (invite,)
)
logger.info(message)
raise SynapseError(400, message)
yield self.handler.exchange_third_party_invite(

@@ -671,18 +667,23 @@ class OpenIdUserInfo(BaseFederationServlet):
def on_GET(self, origin, content, query):
token = query.get(b"access_token", [None])[0]
if token is None:
defer.returnValue((401, {
"errcode": "M_MISSING_TOKEN", "error": "Access Token required"
}))
defer.returnValue(
(401, {"errcode": "M_MISSING_TOKEN", "error": "Access Token required"})
)
return

user_id = yield self.handler.on_openid_userinfo(token.decode('ascii'))
user_id = yield self.handler.on_openid_userinfo(token.decode("ascii"))

if user_id is None:
defer.returnValue((401, {
"errcode": "M_UNKNOWN_TOKEN",
"error": "Access Token unknown or expired"
}))
defer.returnValue(
(
401,
{
"errcode": "M_UNKNOWN_TOKEN",
"error": "Access Token unknown or expired",
},
)
)

defer.returnValue((200, {"sub": user_id}))
@@ -722,7 +723,7 @@ class PublicRoomList(BaseFederationServlet):

def __init__(self, handler, authenticator, ratelimiter, server_name, deny_access):
super(PublicRoomList, self).__init__(
handler, authenticator, ratelimiter, server_name,
handler, authenticator, ratelimiter, server_name
)
self.deny_access = deny_access

@@ -748,9 +749,7 @@ class PublicRoomList(BaseFederationServlet):
network_tuple = ThirdPartyInstanceID(None, None)

data = yield self.handler.get_local_public_room_list(
limit, since_token,
network_tuple=network_tuple,
from_federation=True,
limit, since_token, network_tuple=network_tuple, from_federation=True
)
defer.returnValue((200, data))

@@ -761,17 +760,18 @@ class FederationVersionServlet(BaseFederationServlet):
REQUIRE_AUTH = False

def on_GET(self, origin, content, query):
return defer.succeed((200, {
"server": {
"name": "Synapse",
"version": get_version_string(synapse)
},
}))
return defer.succeed(
(
200,
{"server": {"name": "Synapse", "version": get_version_string(synapse)}},
)
)

class FederationGroupsProfileServlet(BaseFederationServlet):
"""Get/set the basic profile of a group on behalf of a user
"""

PATH = "/groups/(?P<group_id>[^/]*)/profile"

@defer.inlineCallbacks

@@ -780,9 +780,7 @@ class FederationGroupsProfileServlet(BaseFederationServlet):
if get_domain_from_id(requester_user_id) != origin:
raise SynapseError(403, "requester_user_id doesn't match origin")

new_content = yield self.handler.get_group_profile(
group_id, requester_user_id
)
new_content = yield self.handler.get_group_profile(group_id, requester_user_id)

defer.returnValue((200, new_content))

@@ -808,9 +806,7 @@ class FederationGroupsSummaryServlet(BaseFederationServlet):
if get_domain_from_id(requester_user_id) != origin:
raise SynapseError(403, "requester_user_id doesn't match origin")

new_content = yield self.handler.get_group_summary(
group_id, requester_user_id
)
new_content = yield self.handler.get_group_summary(group_id, requester_user_id)

defer.returnValue((200, new_content))

@@ -818,6 +814,7 @@ class FederationGroupsSummaryServlet(BaseFederationServlet):
class FederationGroupsRoomsServlet(BaseFederationServlet):
"""Get the rooms in a group on behalf of a user
"""

PATH = "/groups/(?P<group_id>[^/]*)/rooms"

@defer.inlineCallbacks

@@ -826,9 +823,7 @@ class FederationGroupsRoomsServlet(BaseFederationServlet):
if get_domain_from_id(requester_user_id) != origin:
raise SynapseError(403, "requester_user_id doesn't match origin")

new_content = yield self.handler.get_rooms_in_group(
group_id, requester_user_id
)
new_content = yield self.handler.get_rooms_in_group(group_id, requester_user_id)

defer.returnValue((200, new_content))

@@ -836,6 +831,7 @@ class FederationGroupsAddRoomsServlet(BaseFederationServlet):
class FederationGroupsAddRoomsServlet(BaseFederationServlet):
"""Add/remove room from group
"""

PATH = "/groups/(?P<group_id>[^/]*)/room/(?P<room_id>[^/]*)"

@defer.inlineCallbacks

@@ -857,7 +853,7 @@ class FederationGroupsAddRoomsServlet(BaseFederationServlet):
raise SynapseError(403, "requester_user_id doesn't match origin")

new_content = yield self.handler.remove_room_from_group(
group_id, requester_user_id, room_id,
group_id, requester_user_id, room_id
)

defer.returnValue((200, new_content))

@@ -866,6 +862,7 @@ class FederationGroupsAddRoomsServlet(BaseFederationServlet):
class FederationGroupsAddRoomsConfigServlet(BaseFederationServlet):
"""Update room config in group
"""

PATH = (
"/groups/(?P<group_id>[^/]*)/room/(?P<room_id>[^/]*)"
"/config/(?P<config_key>[^/]*)"

@@ -878,7 +875,7 @@ class FederationGroupsAddRoomsConfigServlet(BaseFederationServlet):
raise SynapseError(403, "requester_user_id doesn't match origin")

result = yield self.groups_handler.update_room_in_group(
group_id, requester_user_id, room_id, config_key, content,
group_id, requester_user_id, room_id, config_key, content
)

defer.returnValue((200, result))

@@ -887,6 +884,7 @@ class FederationGroupsAddRoomsConfigServlet(BaseFederationServlet):
class FederationGroupsUsersServlet(BaseFederationServlet):
"""Get the users in a group on behalf of a user
"""

PATH = "/groups/(?P<group_id>[^/]*)/users"

@defer.inlineCallbacks

@@ -895,9 +893,7 @@ class FederationGroupsUsersServlet(BaseFederationServlet):
if get_domain_from_id(requester_user_id) != origin:
raise SynapseError(403, "requester_user_id doesn't match origin")

new_content = yield self.handler.get_users_in_group(
group_id, requester_user_id
)
new_content = yield self.handler.get_users_in_group(group_id, requester_user_id)

defer.returnValue((200, new_content))

@@ -905,6 +901,7 @@ class FederationGroupsUsersServlet(BaseFederationServlet):
class FederationGroupsInvitedUsersServlet(BaseFederationServlet):
"""Get the users that have been invited to a group
"""

PATH = "/groups/(?P<group_id>[^/]*)/invited_users"

@defer.inlineCallbacks

@@ -923,6 +920,7 @@ class FederationGroupsInvitedUsersServlet(BaseFederationServlet):
class FederationGroupsInviteServlet(BaseFederationServlet):
"""Ask a group server to invite someone to the group
"""

PATH = "/groups/(?P<group_id>[^/]*)/users/(?P<user_id>[^/]*)/invite"

@defer.inlineCallbacks

@@ -932,7 +930,7 @@ class FederationGroupsInviteServlet(BaseFederationServlet):
raise SynapseError(403, "requester_user_id doesn't match origin")

new_content = yield self.handler.invite_to_group(
group_id, user_id, requester_user_id, content,
group_id, user_id, requester_user_id, content
)

defer.returnValue((200, new_content))

@@ -941,6 +939,7 @@ class FederationGroupsInviteServlet(BaseFederationServlet):
class FederationGroupsAcceptInviteServlet(BaseFederationServlet):
"""Accept an invitation from the group server
"""

PATH = "/groups/(?P<group_id>[^/]*)/users/(?P<user_id>[^/]*)/accept_invite"

@defer.inlineCallbacks

@@ -948,9 +947,7 @@ class FederationGroupsAcceptInviteServlet(BaseFederationServlet):
if get_domain_from_id(user_id) != origin:
raise SynapseError(403, "user_id doesn't match origin")

new_content = yield self.handler.accept_invite(
group_id, user_id, content,
)
new_content = yield self.handler.accept_invite(group_id, user_id, content)

defer.returnValue((200, new_content))

@@ -958,6 +955,7 @@ class FederationGroupsAcceptInviteServlet(BaseFederationServlet):
class FederationGroupsJoinServlet(BaseFederationServlet):
"""Attempt to join a group
"""

PATH = "/groups/(?P<group_id>[^/]*)/users/(?P<user_id>[^/]*)/join"

@defer.inlineCallbacks

@@ -965,9 +963,7 @@ class FederationGroupsJoinServlet(BaseFederationServlet):
if get_domain_from_id(user_id) != origin:
raise SynapseError(403, "user_id doesn't match origin")

new_content = yield self.handler.join_group(
group_id, user_id, content,
)
new_content = yield self.handler.join_group(group_id, user_id, content)

defer.returnValue((200, new_content))

@@ -975,6 +971,7 @@ class FederationGroupsJoinServlet(BaseFederationServlet):
class FederationGroupsRemoveUserServlet(BaseFederationServlet):
"""Leave or kick a user from the group
"""

PATH = "/groups/(?P<group_id>[^/]*)/users/(?P<user_id>[^/]*)/remove"

@defer.inlineCallbacks

@@ -984,7 +981,7 @@ class FederationGroupsRemoveUserServlet(BaseFederationServlet):
raise SynapseError(403, "requester_user_id doesn't match origin")

new_content = yield self.handler.remove_user_from_group(
group_id, user_id, requester_user_id, content,
group_id, user_id, requester_user_id, content
)

defer.returnValue((200, new_content))

@@ -993,6 +990,7 @@ class FederationGroupsRemoveUserServlet(BaseFederationServlet):
class FederationGroupsLocalInviteServlet(BaseFederationServlet):
"""A group server has invited a local user
"""

PATH = "/groups/local/(?P<group_id>[^/]*)/users/(?P<user_id>[^/]*)/invite"

@defer.inlineCallbacks

@@ -1000,9 +998,7 @@ class FederationGroupsLocalInviteServlet(BaseFederationServlet):
if get_domain_from_id(group_id) != origin:
raise SynapseError(403, "group_id doesn't match origin")

new_content = yield self.handler.on_invite(
group_id, user_id, content,
)
new_content = yield self.handler.on_invite(group_id, user_id, content)

defer.returnValue((200, new_content))
@@ -1010,6 +1006,7 @@ class FederationGroupsLocalInviteServlet(BaseFederationServlet):
class FederationGroupsRemoveLocalUserServlet(BaseFederationServlet):
"""A group server has removed a local user
"""

PATH = "/groups/local/(?P<group_id>[^/]*)/users/(?P<user_id>[^/]*)/remove"

@defer.inlineCallbacks

@@ -1018,7 +1015,7 @@ class FederationGroupsRemoveLocalUserServlet(BaseFederationServlet):
raise SynapseError(403, "user_id doesn't match origin")

new_content = yield self.handler.user_removed_from_group(
group_id, user_id, content,
group_id, user_id, content
)

defer.returnValue((200, new_content))

@@ -1027,6 +1024,7 @@ class FederationGroupsRemoveLocalUserServlet(BaseFederationServlet):
class FederationGroupsRenewAttestaionServlet(BaseFederationServlet):
"""A group or user's server renews their attestation
"""

PATH = "/groups/(?P<group_id>[^/]*)/renew_attestation/(?P<user_id>[^/]*)"

@defer.inlineCallbacks

@@ -1047,6 +1045,7 @@ class FederationGroupsSummaryRoomsServlet(BaseFederationServlet):
- /groups/:group/summary/rooms/:room_id
- /groups/:group/summary/categories/:category/rooms/:room_id
"""

PATH = (
"/groups/(?P<group_id>[^/]*)/summary"
"(/categories/(?P<category_id>[^/]+))?"

@@ -1063,7 +1062,8 @@ class FederationGroupsSummaryRoomsServlet(BaseFederationServlet):
raise SynapseError(400, "category_id cannot be empty string")

resp = yield self.handler.update_group_summary_room(
group_id, requester_user_id,
group_id,
requester_user_id,
room_id=room_id,
category_id=category_id,
content=content,

@@ -1081,9 +1081,7 @@ class FederationGroupsSummaryRoomsServlet(BaseFederationServlet):
raise SynapseError(400, "category_id cannot be empty string")

resp = yield self.handler.delete_group_summary_room(
group_id, requester_user_id,
room_id=room_id,
category_id=category_id,
group_id, requester_user_id, room_id=room_id, category_id=category_id
)

defer.returnValue((200, resp))

@@ -1092,9 +1090,8 @@ class FederationGroupsSummaryRoomsServlet(BaseFederationServlet):
class FederationGroupsCategoriesServlet(BaseFederationServlet):
"""Get all categories for a group
"""
PATH = (
"/groups/(?P<group_id>[^/]*)/categories/?"
)

PATH = "/groups/(?P<group_id>[^/]*)/categories/?"

@defer.inlineCallbacks
def on_GET(self, origin, content, query, group_id):

@@ -1102,9 +1099,7 @@ class FederationGroupsCategoriesServlet(BaseFederationServlet):
if get_domain_from_id(requester_user_id) != origin:
raise SynapseError(403, "requester_user_id doesn't match origin")

resp = yield self.handler.get_group_categories(
group_id, requester_user_id,
)
resp = yield self.handler.get_group_categories(group_id, requester_user_id)

defer.returnValue((200, resp))

@@ -1112,9 +1107,8 @@ class FederationGroupsCategoriesServlet(BaseFederationServlet):
class FederationGroupsCategoryServlet(BaseFederationServlet):
"""Add/remove/get a category in a group
"""
PATH = (
"/groups/(?P<group_id>[^/]*)/categories/(?P<category_id>[^/]+)"
)

PATH = "/groups/(?P<group_id>[^/]*)/categories/(?P<category_id>[^/]+)"

@defer.inlineCallbacks
def on_GET(self, origin, content, query, group_id, category_id):

@@ -1138,7 +1132,7 @@ class FederationGroupsCategoryServlet(BaseFederationServlet):
raise SynapseError(400, "category_id cannot be empty string")

resp = yield self.handler.upsert_group_category(
group_id, requester_user_id, category_id, content,
group_id, requester_user_id, category_id, content
)

defer.returnValue((200, resp))

@@ -1153,7 +1147,7 @@ class FederationGroupsCategoryServlet(BaseFederationServlet):
raise SynapseError(400, "category_id cannot be empty string")

resp = yield self.handler.delete_group_category(
group_id, requester_user_id, category_id,
group_id, requester_user_id, category_id
)

defer.returnValue((200, resp))

@@ -1162,9 +1156,8 @@ class FederationGroupsCategoryServlet(BaseFederationServlet):
class FederationGroupsRolesServlet(BaseFederationServlet):
"""Get roles in a group
"""
PATH = (
"/groups/(?P<group_id>[^/]*)/roles/?"
)

PATH = "/groups/(?P<group_id>[^/]*)/roles/?"

@defer.inlineCallbacks
def on_GET(self, origin, content, query, group_id):

@@ -1172,9 +1165,7 @@ class FederationGroupsRolesServlet(BaseFederationServlet):
if get_domain_from_id(requester_user_id) != origin:
raise SynapseError(403, "requester_user_id doesn't match origin")

resp = yield self.handler.get_group_roles(
group_id, requester_user_id,
)
resp = yield self.handler.get_group_roles(group_id, requester_user_id)

defer.returnValue((200, resp))

@@ -1182,9 +1173,8 @@ class FederationGroupsRolesServlet(BaseFederationServlet):
class FederationGroupsRoleServlet(BaseFederationServlet):
"""Add/remove/get a role in a group
"""
PATH = (
"/groups/(?P<group_id>[^/]*)/roles/(?P<role_id>[^/]+)"
)

PATH = "/groups/(?P<group_id>[^/]*)/roles/(?P<role_id>[^/]+)"

@defer.inlineCallbacks
def on_GET(self, origin, content, query, group_id, role_id):

@@ -1192,9 +1182,7 @@ class FederationGroupsRoleServlet(BaseFederationServlet):
if get_domain_from_id(requester_user_id) != origin:
raise SynapseError(403, "requester_user_id doesn't match origin")

resp = yield self.handler.get_group_role(
group_id, requester_user_id, role_id
)
resp = yield self.handler.get_group_role(group_id, requester_user_id, role_id)

defer.returnValue((200, resp))

@@ -1208,7 +1196,7 @@ class FederationGroupsRoleServlet(BaseFederationServlet):
raise SynapseError(400, "role_id cannot be empty string")

resp = yield self.handler.update_group_role(
group_id, requester_user_id, role_id, content,
group_id, requester_user_id, role_id, content
)

defer.returnValue((200, resp))

@@ -1223,7 +1211,7 @@ class FederationGroupsRoleServlet(BaseFederationServlet):
raise SynapseError(400, "role_id cannot be empty string")

resp = yield self.handler.delete_group_role(
group_id, requester_user_id, role_id,
group_id, requester_user_id, role_id
)

defer.returnValue((200, resp))

@@ -1236,6 +1224,7 @@ class FederationGroupsSummaryUsersServlet(BaseFederationServlet):
- /groups/:group/summary/users/:user_id
- /groups/:group/summary/roles/:role/users/:user_id
"""

PATH = (
"/groups/(?P<group_id>[^/]*)/summary"
"(/roles/(?P<role_id>[^/]+))?"

@@ -1252,7 +1241,8 @@ class FederationGroupsSummaryUsersServlet(BaseFederationServlet):
raise SynapseError(400, "role_id cannot be empty string")

resp = yield self.handler.update_group_summary_user(
group_id, requester_user_id,
group_id,
requester_user_id,
user_id=user_id,
role_id=role_id,
content=content,

@@ -1270,9 +1260,7 @@ class FederationGroupsSummaryUsersServlet(BaseFederationServlet):
raise SynapseError(400, "role_id cannot be empty string")

resp = yield self.handler.delete_group_summary_user(
group_id, requester_user_id,
user_id=user_id,
role_id=role_id,
group_id, requester_user_id, user_id=user_id, role_id=role_id
)

defer.returnValue((200, resp))

@@ -1281,14 +1269,13 @@ class FederationGroupsSummaryUsersServlet(BaseFederationServlet):
class FederationGroupsBulkPublicisedServlet(BaseFederationServlet):
"""Get roles in a group
"""
PATH = (
"/get_groups_publicised"
)

PATH = "/get_groups_publicised"

@defer.inlineCallbacks
def on_POST(self, origin, content, query):
resp = yield self.handler.bulk_get_publicised_groups(
content["user_ids"], proxy=False,
content["user_ids"], proxy=False
)

defer.returnValue((200, resp))
@@ -1297,6 +1284,7 @@ class FederationGroupsBulkPublicisedServlet(BaseFederationServlet):
class FederationGroupsSettingJoinPolicyServlet(BaseFederationServlet):
"""Sets whether a group is joinable without an invite or knock
"""

PATH = "/groups/(?P<group_id>[^/]*)/settings/m.join_policy"

@defer.inlineCallbacks

@@ -1317,6 +1305,7 @@ class RoomComplexityServlet(BaseFederationServlet):
Indicates to other servers how complex (and therefore likely
resource-intensive) a public room this server knows about is.
"""

PATH = "/rooms/(?P<room_id>[^/]*)/complexity"
PREFIX = FEDERATION_UNSTABLE_PREFIX

@@ -1325,9 +1314,7 @@ class RoomComplexityServlet(BaseFederationServlet):

store = self.handler.hs.get_datastore()

is_public = yield store.is_room_world_readable_or_publicly_joinable(
room_id
)
is_public = yield store.is_room_world_readable_or_publicly_joinable(room_id)

if not is_public:
raise SynapseError(404, "Room not found", errcode=Codes.INVALID_PARAM)

@@ -1362,13 +1349,9 @@ FEDERATION_SERVLET_CLASSES = (
RoomComplexityServlet,
)

OPENID_SERVLET_CLASSES = (
OpenIdUserInfo,
)
OPENID_SERVLET_CLASSES = (OpenIdUserInfo,)

ROOM_LIST_CLASSES = (
PublicRoomList,
)
ROOM_LIST_CLASSES = (PublicRoomList,)

GROUP_SERVER_SERVLET_CLASSES = (
FederationGroupsProfileServlet,

@@ -1399,9 +1382,7 @@ GROUP_LOCAL_SERVLET_CLASSES = (
)

GROUP_ATTESTATION_SERVLET_CLASSES = (
FederationGroupsRenewAttestaionServlet,
)
GROUP_ATTESTATION_SERVLET_CLASSES = (FederationGroupsRenewAttestaionServlet,)

DEFAULT_SERVLET_GROUPS = (
"federation",

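The _parse_auth_header hunks above change only quoting; the parsing logic is untouched. A self-contained sketch of that logic (the X-Matrix header shape is inferred from the code in the diff, not quoted from a spec):

def parse_auth_header(header_bytes):
    # e.g. b'X-Matrix origin=remote.example,key="ed25519:a1",sig="c2ln"'
    header_str = header_bytes.decode("utf-8")
    params = header_str.split(" ")[1].split(",")
    param_dict = dict(kv.split("=") for kv in params)

    def strip_quotes(value):
        # values may arrive wrapped in double quotes
        if value.startswith('"'):
            return value[1:-1]
        return value

    origin = strip_quotes(param_dict["origin"])
    key = strip_quotes(param_dict["key"])
    sig = strip_quotes(param_dict["sig"])
    return origin, key, sig

print(parse_auth_header(b'X-Matrix origin=remote.example,key="ed25519:a1",sig="c2ln"'))
# ('remote.example', 'ed25519:a1', 'c2ln')
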
@@ -32,21 +32,11 @@ class Edu(JsonEncodedObject):
internal ID or previous references graph.
"""

valid_keys = [
"origin",
"destination",
"edu_type",
"content",
]
valid_keys = ["origin", "destination", "edu_type", "content"]

required_keys = [
"edu_type",
]
required_keys = ["edu_type"]

internal_keys = [
"origin",
"destination",
]
internal_keys = ["origin", "destination"]

class Transaction(JsonEncodedObject):

@@ -75,10 +65,7 @@ class Transaction(JsonEncodedObject):
"edus",
]

internal_keys = [
"transaction_id",
"destination",
]
internal_keys = ["transaction_id", "destination"]

required_keys = [
"transaction_id",

@@ -98,9 +85,7 @@ class Transaction(JsonEncodedObject):
del kwargs["edus"]

super(Transaction, self).__init__(
transaction_id=transaction_id,
pdus=pdus,
**kwargs
transaction_id=transaction_id, pdus=pdus, **kwargs
)

@staticmethod

@@ -109,13 +94,9 @@ class Transaction(JsonEncodedObject):
transaction_id and origin_server_ts keys.
"""
if "origin_server_ts" not in kwargs:
raise KeyError(
"Require 'origin_server_ts' to construct a Transaction"
)
raise KeyError("Require 'origin_server_ts' to construct a Transaction")
if "transaction_id" not in kwargs:
raise KeyError(
"Require 'transaction_id' to construct a Transaction"
)
raise KeyError("Require 'transaction_id' to construct a Transaction")

kwargs["pdus"] = [p.get_pdu_json() for p in pdus]

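The Edu and Transaction hunks above only inline three key lists. How those lists interact is easier to see in a self-contained sketch; this mirrors the pattern but is not Synapse's actual JsonEncodedObject base class:

class JsonObjectSketch(object):
    valid_keys = ["origin", "destination", "edu_type", "content"]  # whitelist
    required_keys = ["edu_type"]  # must be supplied
    internal_keys = ["origin", "destination"]  # kept off the wire

    def __init__(self, **kwargs):
        for key in self.required_keys:
            if key not in kwargs:
                raise KeyError("Missing required key: %s" % (key,))
        self._fields = {k: v for k, v in kwargs.items() if k in self.valid_keys}

    def get_dict(self):
        # serialisation drops the internal bookkeeping keys
        return {k: v for k, v in self._fields.items() if k not in self.internal_keys}

edu = JsonObjectSketch(
    origin="a.example", destination="b.example", edu_type="m.presence", content={}
)
print(edu.get_dict())  # {'edu_type': 'm.presence', 'content': {}}
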
@@ -65,6 +65,7 @@ UPDATE_ATTESTATION_TIME_MS = 1 * 24 * 60 * 60 * 1000
class GroupAttestationSigning(object):
"""Creates and verifies group attestations.
"""

def __init__(self, hs):
self.keyring = hs.get_keyring()
self.clock = hs.get_clock()

@@ -113,11 +114,15 @@ class GroupAttestationSigning(object):
validity_period *= random.uniform(*DEFAULT_ATTESTATION_JITTER)
valid_until_ms = int(self.clock.time_msec() + validity_period)

return sign_json({
"group_id": group_id,
"user_id": user_id,
"valid_until_ms": valid_until_ms,
}, self.server_name, self.signing_key)
return sign_json(
{
"group_id": group_id,
"user_id": user_id,
"valid_until_ms": valid_until_ms,
},
self.server_name,
self.signing_key,
)

class GroupAttestionRenewer(object):

@@ -134,7 +139,7 @@ class GroupAttestionRenewer(object):

if not hs.config.worker_app:
self._renew_attestations_loop = self.clock.looping_call(
self._start_renew_attestations, 30 * 60 * 1000,
self._start_renew_attestations, 30 * 60 * 1000
)

@defer.inlineCallbacks

@@ -147,9 +152,7 @@ class GroupAttestionRenewer(object):
raise SynapseError(400, "Neither user not group are on this server")

yield self.attestations.verify_attestation(
attestation,
user_id=user_id,
group_id=group_id,
attestation, user_id=user_id, group_id=group_id
)

yield self.store.update_remote_attestion(group_id, user_id, attestation)

@@ -180,7 +183,8 @@ class GroupAttestionRenewer(object):
else:
logger.warn(
"Incorrectly trying to do attestations for user: %r in %r",
user_id, group_id,
user_id,
group_id,
)
yield self.store.remove_attestation_renewal(group_id, user_id)
return

@@ -188,8 +192,7 @@ class GroupAttestionRenewer(object):
attestation = self.attestations.create_attestation(group_id, user_id)

yield self.transport_client.renew_group_attestation(
destination, group_id, user_id,
content={"attestation": attestation},
destination, group_id, user_id, content={"attestation": attestation}
)

yield self.store.update_attestation_renewal(

@@ -197,12 +200,12 @@ class GroupAttestionRenewer(object):
)
except (RequestSendFailed, HttpResponseException) as e:
logger.warning(
"Failed to renew attestation of %r in %r: %s",
user_id, group_id, e,
"Failed to renew attestation of %r in %r: %s", user_id, group_id, e
)
except Exception:
logger.exception("Error renewing attestation of %r in %r",
user_id, group_id)
logger.exception(
"Error renewing attestation of %r in %r", user_id, group_id
)

for row in rows:
group_id = row["group_id"]

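The reflowed sign_json call above is ordinary signedjson usage. A runnable sketch of the same attestation shape; the key version, server name, and timestamp are invented for illustration:

from signedjson.key import generate_signing_key, get_verify_key
from signedjson.sign import sign_json, verify_signed_json

signing_key = generate_signing_key("a_version")
attestation = sign_json(
    {
        "group_id": "+group:server.example",
        "user_id": "@user:server.example",
        "valid_until_ms": 1560000000000,
    },
    "server.example",
    signing_key,
)

# the receiving side checks the signature with the matching public key
verify_signed_json(attestation, "server.example", get_verify_key(signing_key))
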
@@ -54,8 +54,9 @@ class GroupsServerHandler(object):
hs.get_groups_attestation_renewer()

@defer.inlineCallbacks
def check_group_is_ours(self, group_id, requester_user_id,
and_exists=False, and_is_admin=None):
def check_group_is_ours(
self, group_id, requester_user_id, and_exists=False, and_is_admin=None
):
"""Check that the group is ours, and optionally if it exists.

If group does exist then return group.

@@ -73,7 +74,9 @@ class GroupsServerHandler(object):
if and_exists and not group:
raise SynapseError(404, "Unknown group")

is_user_in_group = yield self.store.is_user_in_group(requester_user_id, group_id)
is_user_in_group = yield self.store.is_user_in_group(
requester_user_id, group_id
)
if group and not is_user_in_group and not group["is_public"]:
raise SynapseError(404, "Unknown group")

@@ -96,25 +99,27 @@ class GroupsServerHandler(object):
"""
yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True)

is_user_in_group = yield self.store.is_user_in_group(requester_user_id, group_id)
is_user_in_group = yield self.store.is_user_in_group(
requester_user_id, group_id
)

profile = yield self.get_group_profile(group_id, requester_user_id)

users, roles = yield self.store.get_users_for_summary_by_role(
group_id, include_private=is_user_in_group,
group_id, include_private=is_user_in_group
)

# TODO: Add profiles to users

rooms, categories = yield self.store.get_rooms_for_summary_by_category(
group_id, include_private=is_user_in_group,
group_id, include_private=is_user_in_group
)

for room_entry in rooms:
room_id = room_entry["room_id"]
joined_users = yield self.store.get_users_in_room(room_id)
entry = yield self.room_list_handler.generate_room_entry(
room_id, len(joined_users), with_alias=False, allow_private=True,
room_id, len(joined_users), with_alias=False, allow_private=True
)
entry = dict(entry)  # so we don't change whats cached
entry.pop("room_id", None)

@@ -134,7 +139,7 @@ class GroupsServerHandler(object):
entry["attestation"] = attestation
else:
entry["attestation"] = self.attestations.create_attestation(
group_id, user_id,
group_id, user_id
)

user_profile = yield self.profile_handler.get_profile_from_cache(user_id)

@@ -143,34 +148,34 @@ class GroupsServerHandler(object):
users.sort(key=lambda e: e.get("order", 0))

membership_info = yield self.store.get_users_membership_info_in_group(
group_id, requester_user_id,
group_id, requester_user_id
)

defer.returnValue({
"profile": profile,
"users_section": {
"users": users,
"roles": roles,
"total_user_count_estimate": 0,  # TODO
},
"rooms_section": {
"rooms": rooms,
"categories": categories,
"total_room_count_estimate": 0,  # TODO
},
"user": membership_info,
})
defer.returnValue(
{
"profile": profile,
"users_section": {
"users": users,
"roles": roles,
"total_user_count_estimate": 0,  # TODO
},
"rooms_section": {
"rooms": rooms,
"categories": categories,
"total_room_count_estimate": 0,  # TODO
},
"user": membership_info,
}
)

@defer.inlineCallbacks
def update_group_summary_room(self, group_id, requester_user_id,
room_id, category_id, content):
def update_group_summary_room(
self, group_id, requester_user_id, room_id, category_id, content
):
"""Add/update a room to the group summary
"""
yield self.check_group_is_ours(
group_id,
requester_user_id,
and_exists=True,
and_is_admin=requester_user_id,
group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id
)

RoomID.from_string(room_id)  # Ensure valid room id

@@ -190,21 +195,17 @@ class GroupsServerHandler(object):
defer.returnValue({})

@defer.inlineCallbacks
def delete_group_summary_room(self, group_id, requester_user_id,
room_id, category_id):
def delete_group_summary_room(
self, group_id, requester_user_id, room_id, category_id
):
"""Remove a room from the summary
"""
yield self.check_group_is_ours(
group_id,
requester_user_id,
and_exists=True,
and_is_admin=requester_user_id,
group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id
)

yield self.store.remove_room_from_summary(
group_id=group_id,
room_id=room_id,
category_id=category_id,
group_id=group_id, room_id=room_id, category_id=category_id
)

defer.returnValue({})

@@ -223,9 +224,7 @@ class GroupsServerHandler(object):

join_policy = _parse_join_policy_from_contents(content)
if join_policy is None:
raise SynapseError(
400, "No value specified for 'm.join_policy'"
)
raise SynapseError(400, "No value specified for 'm.join_policy'")

yield self.store.set_group_join_policy(group_id, join_policy=join_policy)

@@ -237,9 +236,7 @@ class GroupsServerHandler(object):
"""
yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True)

categories = yield self.store.get_group_categories(
group_id=group_id,
)
categories = yield self.store.get_group_categories(group_id=group_id)
defer.returnValue({"categories": categories})

@defer.inlineCallbacks

@@ -249,8 +246,7 @@ class GroupsServerHandler(object):
yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True)

res = yield self.store.get_group_category(
group_id=group_id,
category_id=category_id,
group_id=group_id, category_id=category_id
)

defer.returnValue(res)

@@ -260,10 +256,7 @@ class GroupsServerHandler(object):
"""Add/Update a group category
"""
yield self.check_group_is_ours(
group_id,
requester_user_id,
and_exists=True,
and_is_admin=requester_user_id,
group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id
)

is_public = _parse_visibility_from_contents(content)

@@ -283,15 +276,11 @@ class GroupsServerHandler(object):
"""Delete a group category
"""
yield self.check_group_is_ours(
group_id,
requester_user_id,
and_exists=True,
and_is_admin=requester_user_id
group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id
)

yield self.store.remove_group_category(
group_id=group_id,
category_id=category_id,
group_id=group_id, category_id=category_id
)

defer.returnValue({})
@@ -302,9 +291,7 @@ class GroupsServerHandler(object):
"""
yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True)

roles = yield self.store.get_group_roles(
group_id=group_id,
)
roles = yield self.store.get_group_roles(group_id=group_id)
defer.returnValue({"roles": roles})

@defer.inlineCallbacks

@@ -313,10 +300,7 @@ class GroupsServerHandler(object):
"""
yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True)

res = yield self.store.get_group_role(
group_id=group_id,
role_id=role_id,
)
res = yield self.store.get_group_role(group_id=group_id, role_id=role_id)
defer.returnValue(res)

@defer.inlineCallbacks

@@ -324,10 +308,7 @@ class GroupsServerHandler(object):
"""Add/update a role in a group
"""
yield self.check_group_is_ours(
group_id,
requester_user_id,
and_exists=True,
and_is_admin=requester_user_id,
group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id
)

is_public = _parse_visibility_from_contents(content)

@@ -335,10 +316,7 @@ class GroupsServerHandler(object):
profile = content.get("profile")

yield self.store.upsert_group_role(
group_id=group_id,
role_id=role_id,
is_public=is_public,
profile=profile,
group_id=group_id, role_id=role_id, is_public=is_public, profile=profile
)

defer.returnValue({})

@@ -348,26 +326,21 @@ class GroupsServerHandler(object):
"""Remove role from group
"""
yield self.check_group_is_ours(
group_id,
requester_user_id,
and_exists=True,
and_is_admin=requester_user_id,
group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id
)

yield self.store.remove_group_role(
group_id=group_id,
role_id=role_id,
)
yield self.store.remove_group_role(group_id=group_id, role_id=role_id)

defer.returnValue({})

@defer.inlineCallbacks
def update_group_summary_user(self, group_id, requester_user_id, user_id, role_id,
content):
def update_group_summary_user(
self, group_id, requester_user_id, user_id, role_id, content
):
"""Add/update a users entry in the group summary
"""
yield self.check_group_is_ours(
group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id,
group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id
)

order = content.get("order", None)

@@ -389,13 +362,11 @@ class GroupsServerHandler(object):
"""Remove a user from the group summary
"""
yield self.check_group_is_ours(
group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id,
group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id
)

yield self.store.remove_user_from_summary(
group_id=group_id,
user_id=user_id,
role_id=role_id,
group_id=group_id, user_id=user_id, role_id=role_id
)

defer.returnValue({})

@@ -411,8 +382,11 @@ class GroupsServerHandler(object):

if group:
cols = [
"name", "short_description", "long_description",
"avatar_url", "is_public",
"name",
"short_description",
"long_description",
"avatar_url",
"is_public",
]
group_description = {key: group[key] for key in cols}
group_description["is_openly_joinable"] = group["join_policy"] == "open"

@@ -426,12 +400,11 @@ class GroupsServerHandler(object):
"""Update the group profile
"""
yield self.check_group_is_ours(
group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id,
group_id, requester_user_id, and_exists=True, and_is_admin=requester_user_id
)

profile = {}
for keyname in ("name", "avatar_url", "short_description",
"long_description"):
for keyname in ("name", "avatar_url", "short_description", "long_description"):
if keyname in content:
value = content[keyname]
if not isinstance(value, string_types):

@@ -449,10 +422,12 @@ class GroupsServerHandler(object):

yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True)

is_user_in_group = yield self.store.is_user_in_group(requester_user_id, group_id)
is_user_in_group = yield self.store.is_user_in_group(
requester_user_id, group_id
)

user_results = yield self.store.get_users_in_group(
group_id, include_private=is_user_in_group,
group_id, include_private=is_user_in_group
)

chunk = []

@@ -470,24 +445,25 @@ class GroupsServerHandler(object):
entry["is_privileged"] = bool(is_privileged)

if not self.is_mine_id(g_user_id):
attestation = yield self.store.get_remote_attestation(group_id, g_user_id)
attestation = yield self.store.get_remote_attestation(
group_id, g_user_id
)
if not attestation:
continue

entry["attestation"] = attestation
else:
entry["attestation"] = self.attestations.create_attestation(
group_id, g_user_id,
group_id, g_user_id
)

chunk.append(entry)

# TODO: If admin add lists of users whose attestations have timed out

defer.returnValue({
"chunk": chunk,
"total_user_count_estimate": len(user_results),
})
defer.returnValue(
{"chunk": chunk, "total_user_count_estimate": len(user_results)}
)

@defer.inlineCallbacks
def get_invited_users_in_group(self, group_id, requester_user_id):

@@ -498,7 +474,9 @@ class GroupsServerHandler(object):

yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True)

is_user_in_group = yield self.store.is_user_in_group(requester_user_id, group_id)
is_user_in_group = yield self.store.is_user_in_group(
requester_user_id, group_id
)

if not is_user_in_group:
raise SynapseError(403, "User not in group")

@@ -508,9 +486,7 @@ class GroupsServerHandler(object):
user_profiles = []

for user_id in invited_users:
user_profile = {
"user_id": user_id
}
user_profile = {"user_id": user_id}
try:
profile = yield self.profile_handler.get_profile_from_cache(user_id)
user_profile.update(profile)

@@ -518,10 +494,9 @@ class GroupsServerHandler(object):
logger.warn("Error getting profile for %s: %s", user_id, e)
user_profiles.append(user_profile)

defer.returnValue({
"chunk": user_profiles,
"total_user_count_estimate": len(invited_users),
})
defer.returnValue(
{"chunk": user_profiles, "total_user_count_estimate": len(invited_users)}
)

@defer.inlineCallbacks
def get_rooms_in_group(self, group_id, requester_user_id):

@@ -532,10 +507,12 @@ class GroupsServerHandler(object):

yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True)

is_user_in_group = yield self.store.is_user_in_group(requester_user_id, group_id)
is_user_in_group = yield self.store.is_user_in_group(
requester_user_id, group_id
)

room_results = yield self.store.get_rooms_in_group(
group_id, include_private=is_user_in_group,
group_id, include_private=is_user_in_group
)

chunk = []

@@ -544,7 +521,7 @@ class GroupsServerHandler(object):

joined_users = yield self.store.get_users_in_room(room_id)
entry = yield self.room_list_handler.generate_room_entry(
room_id, len(joined_users), with_alias=False, allow_private=True,
room_id, len(joined_users), with_alias=False, allow_private=True
)

if not entry:

@@ -556,10 +533,9 @@ class GroupsServerHandler(object):

chunk.sort(key=lambda e: -e["num_joined_members"])

defer.returnValue({
"chunk": chunk,
"total_room_count_estimate": len(room_results),
})
defer.returnValue(
{"chunk": chunk, "total_room_count_estimate": len(room_results)}
)

@defer.inlineCallbacks
def add_room_to_group(self, group_id, requester_user_id, room_id, content):

@@ -578,8 +554,9 @@ class GroupsServerHandler(object):
defer.returnValue({})

@defer.inlineCallbacks
def update_room_in_group(self, group_id, requester_user_id, room_id, config_key,
content):
def update_room_in_group(
self, group_id, requester_user_id, room_id, config_key, content
):
"""Update room in group
"""
RoomID.from_string(room_id)  # Ensure valid room id

@@ -592,8 +569,7 @@ class GroupsServerHandler(object):
is_public = _parse_visibility_dict(content)

yield self.store.update_room_in_group_visibility(
group_id, room_id,
is_public=is_public,
group_id, room_id, is_public=is_public
)
else:
raise SynapseError(400, "Uknown config option")

@@ -625,10 +601,7 @@ class GroupsServerHandler(object):
# TODO: Check if user is already invited

content = {
"profile": {
"name": group["name"],
"avatar_url": group["avatar_url"],
},
"profile": {"name": group["name"], "avatar_url": group["avatar_url"]},
"inviter": requester_user_id,
}

@ -638,9 +611,7 @@ class GroupsServerHandler(object):
|
|||
local_attestation = None
|
||||
else:
|
||||
local_attestation = self.attestations.create_attestation(group_id, user_id)
|
||||
content.update({
|
||||
"attestation": local_attestation,
|
||||
})
|
||||
content.update({"attestation": local_attestation})
|
||||
|
||||
res = yield self.transport_client.invite_to_group_notification(
|
||||
get_domain_from_id(user_id), group_id, user_id, content
|
||||
|
@ -658,31 +629,24 @@ class GroupsServerHandler(object):
|
|||
remote_attestation = res["attestation"]
|
||||
|
||||
yield self.attestations.verify_attestation(
|
||||
remote_attestation,
|
||||
user_id=user_id,
|
||||
group_id=group_id,
|
||||
remote_attestation, user_id=user_id, group_id=group_id
|
||||
)
|
||||
else:
|
||||
remote_attestation = None
|
||||
|
||||
yield self.store.add_user_to_group(
|
||||
group_id, user_id,
|
||||
group_id,
|
||||
user_id,
|
||||
is_admin=False,
|
||||
is_public=False, # TODO
|
||||
local_attestation=local_attestation,
|
||||
remote_attestation=remote_attestation,
|
||||
)
|
||||
elif res["state"] == "invite":
|
||||
yield self.store.add_group_invite(
|
||||
group_id, user_id,
|
||||
)
|
||||
defer.returnValue({
|
||||
"state": "invite"
|
||||
})
|
||||
yield self.store.add_group_invite(group_id, user_id)
|
||||
defer.returnValue({"state": "invite"})
|
||||
elif res["state"] == "reject":
|
||||
defer.returnValue({
|
||||
"state": "reject"
|
||||
})
|
||||
defer.returnValue({"state": "reject"})
|
||||
else:
|
||||
raise SynapseError(502, "Unknown state returned by HS")
|
||||
|
||||
|
@ -693,16 +657,12 @@ class GroupsServerHandler(object):
|
|||
See accept_invite, join_group.
|
||||
"""
|
||||
if not self.hs.is_mine_id(user_id):
|
||||
local_attestation = self.attestations.create_attestation(
|
||||
group_id, user_id,
|
||||
)
|
||||
local_attestation = self.attestations.create_attestation(group_id, user_id)
|
||||
|
||||
remote_attestation = content["attestation"]
|
||||
|
||||
yield self.attestations.verify_attestation(
|
||||
remote_attestation,
|
||||
user_id=user_id,
|
||||
group_id=group_id,
|
||||
remote_attestation, user_id=user_id, group_id=group_id
|
||||
)
|
||||
else:
|
||||
local_attestation = None
|
||||
|
@ -711,7 +671,8 @@ class GroupsServerHandler(object):
|
|||
is_public = _parse_visibility_from_contents(content)
|
||||
|
||||
yield self.store.add_user_to_group(
|
||||
group_id, user_id,
|
||||
group_id,
|
||||
user_id,
|
||||
is_admin=False,
|
||||
is_public=is_public,
|
||||
local_attestation=local_attestation,
|
||||
|
@ -731,17 +692,14 @@ class GroupsServerHandler(object):
|
|||
yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True)
|
||||
|
||||
is_invited = yield self.store.is_user_invited_to_local_group(
|
||||
group_id, requester_user_id,
|
||||
group_id, requester_user_id
|
||||
)
|
||||
if not is_invited:
|
||||
raise SynapseError(403, "User not invited to group")
|
||||
|
||||
local_attestation = yield self._add_user(group_id, requester_user_id, content)
|
||||
|
||||
defer.returnValue({
|
||||
"state": "join",
|
||||
"attestation": local_attestation,
|
||||
})
|
||||
defer.returnValue({"state": "join", "attestation": local_attestation})
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def join_group(self, group_id, requester_user_id, content):
|
||||
|
@ -753,15 +711,12 @@ class GroupsServerHandler(object):
|
|||
group_info = yield self.check_group_is_ours(
|
||||
group_id, requester_user_id, and_exists=True
|
||||
)
|
||||
if group_info['join_policy'] != "open":
|
||||
if group_info["join_policy"] != "open":
|
||||
raise SynapseError(403, "Group is not publicly joinable")
|
||||
|
||||
local_attestation = yield self._add_user(group_id, requester_user_id, content)
|
||||
|
||||
defer.returnValue({
|
||||
"state": "join",
|
||||
"attestation": local_attestation,
|
||||
})
|
||||
defer.returnValue({"state": "join", "attestation": local_attestation})
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def knock(self, group_id, requester_user_id, content):
|
||||
|
@ -800,9 +755,7 @@ class GroupsServerHandler(object):
|
|||
|
||||
is_kick = True
|
||||
|
||||
yield self.store.remove_user_from_group(
|
||||
group_id, user_id,
|
||||
)
|
||||
yield self.store.remove_user_from_group(group_id, user_id)
|
||||
|
||||
if is_kick:
|
||||
if self.hs.is_mine_id(user_id):
|
||||
|
@ -830,19 +783,20 @@ class GroupsServerHandler(object):
|
|||
if group:
|
||||
raise SynapseError(400, "Group already exists")
|
||||
|
||||
is_admin = yield self.auth.is_server_admin(UserID.from_string(requester_user_id))
|
||||
is_admin = yield self.auth.is_server_admin(
|
||||
UserID.from_string(requester_user_id)
|
||||
)
|
||||
if not is_admin:
|
||||
if not self.hs.config.enable_group_creation:
|
||||
raise SynapseError(
|
||||
403, "Only a server admin can create groups on this server",
|
||||
403, "Only a server admin can create groups on this server"
|
||||
)
|
||||
localpart = group_id_obj.localpart
|
||||
if not localpart.startswith(self.hs.config.group_creation_prefix):
|
||||
raise SynapseError(
|
||||
400,
|
||||
"Can only create groups with prefix %r on this server" % (
|
||||
self.hs.config.group_creation_prefix,
|
||||
),
|
||||
"Can only create groups with prefix %r on this server"
|
||||
% (self.hs.config.group_creation_prefix,),
|
||||
)
|
||||
|
||||
profile = content.get("profile", {})
|
||||
|
@ -865,21 +819,19 @@ class GroupsServerHandler(object):
|
|||
remote_attestation = content["attestation"]
|
||||
|
||||
yield self.attestations.verify_attestation(
|
||||
remote_attestation,
|
||||
user_id=requester_user_id,
|
||||
group_id=group_id,
|
||||
remote_attestation, user_id=requester_user_id, group_id=group_id
|
||||
)
|
||||
|
||||
local_attestation = self.attestations.create_attestation(
|
||||
group_id,
|
||||
requester_user_id,
|
||||
group_id, requester_user_id
|
||||
)
|
||||
else:
|
||||
local_attestation = None
|
||||
remote_attestation = None
|
||||
|
||||
yield self.store.add_user_to_group(
|
||||
group_id, requester_user_id,
|
||||
group_id,
|
||||
requester_user_id,
|
||||
is_admin=True,
|
||||
is_public=True, # TODO
|
||||
local_attestation=local_attestation,
|
||||
|
@ -893,9 +845,7 @@ class GroupsServerHandler(object):
|
|||
avatar_url=user_profile.get("avatar_url"),
|
||||
)
|
||||
|
||||
defer.returnValue({
|
||||
"group_id": group_id,
|
||||
})
|
||||
defer.returnValue({"group_id": group_id})
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def delete_group(self, group_id, requester_user_id):
|
||||
|
@@ -911,29 +861,22 @@ class GroupsServerHandler(object):
             Deferred
         """

-        yield self.check_group_is_ours(
-            group_id, requester_user_id,
-            and_exists=True,
-        )
+        yield self.check_group_is_ours(group_id, requester_user_id, and_exists=True)

         # Only server admins or group admins can delete groups.

-        is_admin = yield self.store.is_user_admin_in_group(
-            group_id, requester_user_id
-        )
+        is_admin = yield self.store.is_user_admin_in_group(group_id, requester_user_id)

         if not is_admin:
             is_admin = yield self.auth.is_server_admin(
-                UserID.from_string(requester_user_id),
+                UserID.from_string(requester_user_id)
             )

         if not is_admin:
             raise SynapseError(403, "User is not an admin")

         # Before deleting the group lets kick everyone out of it
-        users = yield self.store.get_users_in_group(
-            group_id, include_private=True,
-        )
+        users = yield self.store.get_users_in_group(group_id, include_private=True)

         @defer.inlineCallbacks
         def _kick_user_from_group(user_id):
@@ -989,9 +932,7 @@ def _parse_join_policy_dict(join_policy_dict):
         return "invite"

     if join_policy_type not in ("invite", "open"):
-        raise SynapseError(
-            400, "Synapse only supports 'invite'/'open' join rule"
-        )
+        raise SynapseError(400, "Synapse only supports 'invite'/'open' join rule")
     return join_policy_type
@@ -1018,7 +959,5 @@ def _parse_visibility_dict(visibility):
         return True

     if vis_type not in ("public", "private"):
-        raise SynapseError(
-            400, "Synapse only supports 'public'/'private' visibility"
-        )
+        raise SynapseError(400, "Synapse only supports 'public'/'private' visibility")
     return vis_type == "public"
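Both `_parse_join_policy_dict` and `_parse_visibility_dict` follow the same validate-or-default shape that Black is compressing in these two hunks. A standalone sketch of that pattern, under the assumption (consistent with the hunks) that each dict carries a `type` key; a plain `ValueError` stands in for Synapse's `SynapseError(400, ...)`:

def parse_join_policy(join_policy_dict):
    """Return the group's join policy, defaulting to "invite"."""
    join_policy_type = join_policy_dict.get("type")
    if not join_policy_type:
        return "invite"
    if join_policy_type not in ("invite", "open"):
        # Synapse raises SynapseError(400, ...) here.
        raise ValueError("Synapse only supports 'invite'/'open' join rule")
    return join_policy_type


def parse_visibility(visibility_dict):
    """Return True if the group is public, False if private."""
    vis_type = visibility_dict.get("type")
    if not vis_type:
        return True  # public by default
    if vis_type not in ("public", "private"):
        raise ValueError("Synapse only supports 'public'/'private' visibility")
    return vis_type == "public"


assert parse_join_policy({}) == "invite"
assert parse_visibility({"type": "private"}) is False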
@@ -94,14 +94,15 @@ class BaseHandler(object):
         burst_count = self.hs.config.rc_message.burst_count

         allowed, time_allowed = self.ratelimiter.can_do_action(
-            user_id, time_now,
+            user_id,
+            time_now,
             rate_hz=messages_per_second,
             burst_count=burst_count,
             update=update,
         )
         if not allowed:
             raise LimitExceededError(
-                retry_after_ms=int(1000 * (time_allowed - time_now)),
+                retry_after_ms=int(1000 * (time_allowed - time_now))
             )

     @defer.inlineCallbacks
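This hunk shows the shape of Synapse's ratelimit contract: `can_do_action` returns an `(allowed, time_allowed)` pair, and the caller converts the next-allowed timestamp into a `retry_after_ms` for the client. A minimal token-bucket sketch of that contract; the class and bucket bookkeeping below are illustrative, not Synapse's actual `Ratelimiter` implementation:

import time


class TokenBucketLimiter:
    """Per-key token bucket: refills at rate_hz tokens/sec, caps at burst_count."""

    def __init__(self):
        self._buckets = {}  # key -> (tokens, last_update_ts)

    def can_do_action(self, key, time_now, rate_hz, burst_count, update=True):
        tokens, last_ts = self._buckets.get(key, (burst_count, time_now))
        # Refill for the elapsed interval, capped at the burst size.
        tokens = min(burst_count, tokens + (time_now - last_ts) * rate_hz)
        allowed = tokens >= 1
        if allowed and update:
            self._buckets[key] = (tokens - 1, time_now)
        # time_allowed: when the next whole token becomes available.
        time_allowed = time_now if allowed else time_now + (1 - tokens) / rate_hz
        return allowed, time_allowed


limiter = TokenBucketLimiter()
now = time.time()
allowed, time_allowed = limiter.can_do_action(
    "@user:example.com", now, rate_hz=0.2, burst_count=10
)
if not allowed:
    retry_after_ms = int(1000 * (time_allowed - now))  # as in the hunk above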
@@ -139,7 +140,7 @@ class BaseHandler(object):

             if member_event.content["membership"] not in {
                 Membership.JOIN,
-                Membership.INVITE
+                Membership.INVITE,
             }:
                 continue
@@ -156,8 +157,7 @@ class BaseHandler(object):
             # and having homeservers have their own users leave keeps more
             # of that decision-making and control local to the guest-having
             # homeserver.
-            requester = synapse.types.create_requester(
-                target_user, is_guest=True)
+            requester = synapse.types.create_requester(target_user, is_guest=True)
             handler = self.hs.get_room_member_handler()
             yield handler.update_membership(
                 requester,
@@ -20,7 +20,7 @@ class AccountDataEventSource(object):
     def __init__(self, hs):
         self.store = hs.get_datastore()

-    def get_current_key(self, direction='f'):
+    def get_current_key(self, direction="f"):
         return self.store.get_max_account_data_stream_id()

     @defer.inlineCallbacks
@@ -34,29 +34,22 @@ class AccountDataEventSource(object):
         tags = yield self.store.get_updated_tags(user_id, last_stream_id)

         for room_id, room_tags in tags.items():
-            results.append({
-                "type": "m.tag",
-                "content": {"tags": room_tags},
-                "room_id": room_id,
-            })
+            results.append(
+                {"type": "m.tag", "content": {"tags": room_tags}, "room_id": room_id}
+            )

         account_data, room_account_data = (
             yield self.store.get_updated_account_data_for_user(user_id, last_stream_id)
         )

         for account_data_type, content in account_data.items():
-            results.append({
-                "type": account_data_type,
-                "content": content,
-            })
+            results.append({"type": account_data_type, "content": content})

         for room_id, account_data in room_account_data.items():
             for account_data_type, content in account_data.items():
-                results.append({
-                    "type": account_data_type,
-                    "content": content,
-                    "room_id": room_id,
-                })
+                results.append(
+                    {"type": account_data_type, "content": content, "room_id": room_id}
+                )

         defer.returnValue((results, current_stream_id))
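The reshaped `AccountDataEventSource` hunk flattens updated tags and account data into one list of event dicts, with room-scoped entries additionally carrying a `room_id` key. A self-contained sketch mirroring those loops; the inputs here are plain dicts with illustrative values, not the Deferreds Synapse's store methods return:

def build_account_data_events(tags, account_data, room_account_data):
    """Flatten updated tags and account data into event dicts."""
    results = []
    # Tag updates become m.tag events scoped to their room.
    for room_id, room_tags in tags.items():
        results.append(
            {"type": "m.tag", "content": {"tags": room_tags}, "room_id": room_id}
        )
    # Global account data: type + content only.
    for account_data_type, content in account_data.items():
        results.append({"type": account_data_type, "content": content})
    # Per-room account data additionally carries the room_id.
    for room_id, data in room_account_data.items():
        for account_data_type, content in data.items():
            results.append(
                {"type": account_data_type, "content": content, "room_id": room_id}
            )
    return results


events = build_account_data_events(
    tags={"!abc:example.com": {"u.work": {"order": 0.5}}},
    account_data={"m.push_rules": {"global": {}}},
    room_account_data={},
)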
Some files were not shown because too many files have changed in this diff.