Port handlers/ to Python 3 (#3803)

Amber Brown 2018-09-07 00:22:23 +10:00 committed by GitHub
parent 4f8baab0c4
commit 2608ebc04c
8 changed files with 24 additions and 19 deletions

.gitignore (1 addition)

@@ -44,6 +44,7 @@ media_store/
 build/
 venv/
 venv*/
+*venv/
 localhost-800*/
 static/client/register/register_config.js

changelog.d/3803.misc (new file, 1 addition)

@@ -0,0 +1 @@
+handlers/ is now ported to Python 3.

synapse/handlers/auth.py

@@ -895,22 +895,24 @@ class AuthHandler(BaseHandler):
         Args:
             password (unicode): Password to hash.
-            stored_hash (unicode): Expected hash value.
+            stored_hash (bytes): Expected hash value.
 
         Returns:
             Deferred(bool): Whether self.hash(password) == stored_hash.
         """
 
         def _do_validate_hash():
             # Normalise the Unicode in the password
             pw = unicodedata.normalize("NFKC", password)
 
             return bcrypt.checkpw(
                 pw.encode('utf8') + self.hs.config.password_pepper.encode("utf8"),
-                stored_hash.encode('utf8')
+                stored_hash
             )
 
         if stored_hash:
+            if not isinstance(stored_hash, bytes):
+                stored_hash = stored_hash.encode('ascii')
             return make_deferred_yieldable(
                 threads.deferToThreadPool(
                     self.hs.get_reactor(),
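
For reference, a minimal standalone sketch (not part of the diff) of the bytes-only bcrypt API this change relies on; the password and pepper values below are made up:

import unicodedata

import bcrypt

password = u"s3cret passw\u00f6rd"  # user-supplied unicode
pepper = u"example_pepper"          # stand-in for hs.config.password_pepper

# Hashing: bcrypt only accepts and returns bytes on Python 3.
pw = unicodedata.normalize("NFKC", password)
stored_hash = bcrypt.hashpw(pw.encode("utf8") + pepper.encode("utf8"), bcrypt.gensalt())
assert isinstance(stored_hash, bytes)

# Checking: both arguments must already be bytes; passing str raises TypeError,
# which is why the handler now encodes a str stored_hash once, up front.
assert bcrypt.checkpw(pw.encode("utf8") + pepper.encode("utf8"), stored_hash)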

synapse/handlers/e2e_keys.py

@@ -330,7 +330,8 @@ class E2eKeysHandler(object):
                     (algorithm, key_id, ex_json, key)
                 )
             else:
-                new_keys.append((algorithm, key_id, encode_canonical_json(key)))
+                new_keys.append((
+                    algorithm, key_id, encode_canonical_json(key).decode('ascii')))
 
         yield self.store.add_e2e_one_time_keys(
             user_id, device_id, time_now, new_keys
@@ -358,7 +359,7 @@ def _exception_to_failure(e):
     # Note that some Exceptions (notably twisted's ResponseFailed etc) don't
     # give a string for e.message, which json then fails to serialize.
     return {
-        "status": 503, "message": str(e.message),
+        "status": 503, "message": str(e),
     }
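
Two Python 3 behaviours motivate these hunks; a small sketch (with a made-up one-time key dict) of both:

import json

from canonicaljson import encode_canonical_json

one_time_key = {"key": "zKbLg+NrIjpnagy+pIY6uPL4ZwEG2v+8F9lmgsnlZzs"}

# encode_canonical_json() returns bytes, which the stdlib json encoder refuses to
# serialize later on, hence the .decode('ascii') before the key is stored.
canonical = encode_canonical_json(one_time_key)
assert isinstance(canonical, bytes)
json.dumps(canonical.decode("ascii"))  # fine; json.dumps(canonical) would raise TypeError

# BaseException.message is gone in Python 3, so str(e) is the portable spelling.
try:
    raise RuntimeError("boom")
except RuntimeError as e:
    assert str(e) == "boom"
    assert not hasattr(e, "message")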

synapse/handlers/federation.py

@@ -594,7 +594,7 @@ class FederationHandler(BaseHandler):
             required_auth = set(
                 a_id
-                for event in events + state_events.values() + auth_events.values()
+                for event in events + list(state_events.values()) + list(auth_events.values())
                 for a_id, _ in event.auth_events
             )
             auth_events.update({
@@ -802,7 +802,7 @@ class FederationHandler(BaseHandler):
                     )
                     continue
                 except NotRetryingDestination as e:
-                    logger.info(e.message)
+                    logger.info(str(e))
                     continue
                 except FederationDeniedError as e:
                     logger.info(e)
@@ -1358,7 +1358,7 @@ class FederationHandler(BaseHandler):
         )
 
         if state_groups:
-            _, state = state_groups.items().pop()
+            _, state = list(state_groups.items()).pop()
             results = state
 
             if event.is_state():
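
The list(...) wrappers are needed because dict.values() and dict.items() return view objects on Python 3; a short sketch with stand-in lists and dicts:

events = ["event_a"]
state_events = {"$s1": "event_b"}
auth_events = {"$a1": "event_c"}

# On Python 3, list + dict_values raises TypeError, so the views are materialised first.
combined = events + list(state_events.values()) + list(auth_events.values())
assert combined == ["event_a", "event_b", "event_c"]

# Likewise dict.items() is a view with no .pop(); wrapping it in list() restores
# the Python 2 behaviour the old code depended on.
_, state = list(state_events.items()).pop()
assert state == "event_b"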

synapse/handlers/room_list.py

@@ -162,7 +162,7 @@ class RoomListHandler(BaseHandler):
         # Filter out rooms that we don't want to return
         rooms_to_scan = [
             r for r in sorted_rooms
-            if r not in newly_unpublished and rooms_to_num_joined[room_id] > 0
+            if r not in newly_unpublished and rooms_to_num_joined[r] > 0
         ]
 
         total_room_count = len(rooms_to_scan)
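
An illustrative sketch (room IDs and counts are made up) of the corrected filter: each candidate room r is now checked against its own joined-member count rather than a stale room_id:

sorted_rooms = ["!a:example.org", "!b:example.org", "!c:example.org"]
newly_unpublished = {"!c:example.org"}
rooms_to_num_joined = {"!a:example.org": 3, "!b:example.org": 0, "!c:example.org": 5}

# Only published rooms with at least one joined member survive the filter.
rooms_to_scan = [
    r for r in sorted_rooms
    if r not in newly_unpublished and rooms_to_num_joined[r] > 0
]
assert rooms_to_scan == ["!a:example.org"]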

synapse/handlers/search.py

@@ -54,7 +54,7 @@ class SearchHandler(BaseHandler):
         batch_token = None
         if batch:
             try:
-                b = decode_base64(batch)
+                b = decode_base64(batch).decode('ascii')
                 batch_group, batch_group_key, batch_token = b.split("\n")
 
                 assert batch_group is not None
@@ -258,18 +258,18 @@ class SearchHandler(BaseHandler):
             # it returns more from the same group (if applicable) rather
             # than reverting to searching all results again.
             if batch_group and batch_group_key:
-                global_next_batch = encode_base64("%s\n%s\n%s" % (
+                global_next_batch = encode_base64(("%s\n%s\n%s" % (
                     batch_group, batch_group_key, pagination_token
-                ))
+                )).encode('ascii'))
             else:
-                global_next_batch = encode_base64("%s\n%s\n%s" % (
+                global_next_batch = encode_base64(("%s\n%s\n%s" % (
                     "all", "", pagination_token
-                ))
+                )).encode('ascii'))
 
             for room_id, group in room_groups.items():
-                group["next_batch"] = encode_base64("%s\n%s\n%s" % (
+                group["next_batch"] = encode_base64(("%s\n%s\n%s" % (
                     "room_id", room_id, pagination_token
-                ))
+                )).encode('ascii'))
 
             allowed_events.extend(room_events)
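
A sketch of the bytes/str round-trip the batch tokens now perform explicitly, using the standard library's base64 module as a stand-in for the project's encode_base64/decode_base64 helpers; the token values are made up:

from base64 import b64decode, b64encode

pagination_token = "t123"
token = "%s\n%s\n%s" % ("room_id", "!room:example.org", pagination_token)

# Encoders want bytes on Python 3, so the joined string is encoded first ...
encoded = b64encode(token.encode("ascii"))

# ... and decoders return bytes, so the result is decoded back to str before split().
batch_group, batch_group_key, batch_token = b64decode(encoded).decode("ascii").split("\n")
assert (batch_group, batch_group_key, batch_token) == ("room_id", "!room:example.org", "t123")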

synapse/handlers/sync.py

@@ -545,7 +545,7 @@ class SyncHandler(object):
         member_ids = {
             state_key: event_id
-            for (t, state_key), event_id in state_ids.iteritems()
+            for (t, state_key), event_id in iteritems(state_ids)
             if t == EventTypes.Member
         }
 
         name_id = state_ids.get((EventTypes.Name, ''))
@@ -774,7 +774,7 @@ class SyncHandler(object):
             logger.debug("filtering state from %r...", state_ids)
             state_ids = {
                 t: event_id
-                for t, event_id in state_ids.iteritems()
+                for t, event_id in iteritems(state_ids)
                 if cache.get(t[1]) != event_id
             }
             logger.debug("...to %r", state_ids)
@@ -1753,7 +1753,7 @@ def _calculate_state(
     if lazy_load_members:
         p_ids.difference_update(
-            e for t, e in timeline_start.iteritems()
+            e for t, e in iteritems(timeline_start)
             if t[0] == EventTypes.Member
         )
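
These hunks swap dict.iteritems(), which no longer exists on Python 3, for six.iteritems(); a minimal sketch with a stand-in state_ids dict:

from six import iteritems

state_ids = {
    ("m.room.member", "@alice:example.org"): "$membership_event",
    ("m.room.name", ""): "$name_event",
}

# Plain dicts have no .iteritems() on Python 3 ...
assert not hasattr(state_ids, "iteritems")

# ... but six.iteritems() works on both Python 2 and 3 (it falls back to .items() on 3).
member_ids = {
    state_key: event_id
    for (t, state_key), event_id in iteritems(state_ids)
    if t == "m.room.member"
}
assert member_ids == {"@alice:example.org": "$membership_event"}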