2017-03-22 16:54:20 +03:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
# Copyright 2017 Vector Creations Ltd
|
|
|
|
#
|
|
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
# you may not use this file except in compliance with the License.
|
|
|
|
# You may obtain a copy of the License at
|
|
|
|
#
|
|
|
|
# http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
#
|
|
|
|
# Unless required by applicable law or agreed to in writing, software
|
|
|
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
# See the License for the specific language governing permissions and
|
|
|
|
# limitations under the License.
|
|
|
|
|
|
|
|
import signedjson.key
|
2020-01-30 14:25:59 +03:00
|
|
|
import unpaddedbase64
|
2018-07-09 09:09:20 +03:00
|
|
|
|
2019-04-08 16:51:07 +03:00
|
|
|
from twisted.internet.defer import Deferred
|
|
|
|
|
2019-04-03 20:10:24 +03:00
|
|
|
from synapse.storage.keys import FetchKeyResult
|
|
|
|
|
2017-03-22 16:54:20 +03:00
|
|
|
import tests.unittest
|
|
|
|
|
2020-01-30 14:25:59 +03:00
|
|
|
|
|
|
|
def decode_verify_key_base64(key_id: str, key_base64: str):
    """Decode an unpadded-base64 verify key into a verify-key object.

    *key_id* supplies the algorithm/version information used by
    signedjson when constructing the key object.
    """
    return signedjson.key.decode_verify_key_bytes(
        key_id, unpaddedbase64.decode_base64(key_base64)
    )
|
|
|
|
|
|
|
|
|
|
|
|
# Fixed ed25519 verify keys used as fixtures by the tests below.  The part of
# the key id after "ed25519:" becomes the key's version (asserted in
# test_get_server_verify_keys).
KEY_1 = decode_verify_key_base64(
    "ed25519:key1", "fP5l4JzpZPq/zdbBg5xx6lQGAAOM9/3w94cqiJ5jPrw"
)
KEY_2 = decode_verify_key_base64(
    "ed25519:key2", "Noi6WqcDj0QmPxCNQqgezwTlBKrfqehY1u2FyWP9uYw"
)
|
2017-03-22 16:54:20 +03:00
|
|
|
|
|
|
|
|
2019-04-09 00:00:11 +03:00
|
|
|
class KeyStoreTestCase(tests.unittest.HomeserverTestCase):
    """Tests for the datastore's server-verify-key storage and caching."""

    def test_get_server_verify_keys(self):
        """Keys stored via store_server_verify_keys can be fetched back.

        Results are keyed on (server_name, key_id); a requested key that was
        never stored maps to None rather than being omitted.
        """
        store = self.hs.get_datastore()

        key_id_1 = "ed25519:key1"
        key_id_2 = "ed25519:KEY_ID_2"
        # NOTE(review): args appear to be (fetched-from server, timestamp,
        # list of (server_name, key_id, FetchKeyResult)) — confirm against the
        # store_server_verify_keys signature.
        d = store.store_server_verify_keys(
            "from_server",
            10,
            [
                ("server1", key_id_1, FetchKeyResult(KEY_1, 100)),
                ("server1", key_id_2, FetchKeyResult(KEY_2, 200)),
            ],
        )
        self.get_success(d)

        # Fetch both stored keys plus one that was never stored.
        d = store.get_server_verify_keys(
            [("server1", key_id_1), ("server1", key_id_2), ("server1", "ed25519:key3")]
        )
        res = self.get_success(d)

        # All three requested keys appear in the result, including the miss.
        self.assertEqual(len(res.keys()), 3)
        res1 = res[("server1", key_id_1)]
        self.assertEqual(res1.verify_key, KEY_1)
        self.assertEqual(res1.verify_key.version, "key1")
        self.assertEqual(res1.valid_until_ts, 100)

        res2 = res[("server1", key_id_2)]
        self.assertEqual(res2.verify_key, KEY_2)
        # version comes from the ID it was stored with
        self.assertEqual(res2.verify_key.version, "KEY_ID_2")
        self.assertEqual(res2.valid_until_ts, 200)

        # non-existent result gives None
        self.assertIsNone(res[("server1", "ed25519:key3")])

    def test_cache(self):
        """Check that updates correctly invalidate the cache."""

        store = self.hs.get_datastore()

        key_id_1 = "ed25519:key1"
        key_id_2 = "ed25519:key2"

        # Store two keys, then read them back to populate the cache.
        d = store.store_server_verify_keys(
            "from_server",
            0,
            [
                ("srv1", key_id_1, FetchKeyResult(KEY_1, 100)),
                ("srv1", key_id_2, FetchKeyResult(KEY_2, 200)),
            ],
        )
        self.get_success(d)

        d = store.get_server_verify_keys([("srv1", key_id_1), ("srv1", key_id_2)])
        res = self.get_success(d)
        self.assertEqual(len(res.keys()), 2)

        res1 = res[("srv1", key_id_1)]
        self.assertEqual(res1.verify_key, KEY_1)
        self.assertEqual(res1.valid_until_ts, 100)

        res2 = res[("srv1", key_id_2)]
        self.assertEqual(res2.verify_key, KEY_2)
        self.assertEqual(res2.valid_until_ts, 200)

        # we should be able to look up the same thing again without a db hit
        # (a cache hit may return a plain value instead of a Deferred, hence
        # the isinstance check).
        res = store.get_server_verify_keys([("srv1", key_id_1)])
        if isinstance(res, Deferred):
            res = self.successResultOf(res)
        self.assertEqual(len(res.keys()), 1)
        self.assertEqual(res[("srv1", key_id_1)].verify_key, KEY_1)

        # Overwrite key_id_2 with a freshly generated key; this must
        # invalidate the cached entry.
        new_key_2 = signedjson.key.get_verify_key(
            signedjson.key.generate_signing_key("key2")
        )
        d = store.store_server_verify_keys(
            "from_server", 10, [("srv1", key_id_2, FetchKeyResult(new_key_2, 300))]
        )
        self.get_success(d)

        d = store.get_server_verify_keys([("srv1", key_id_1), ("srv1", key_id_2)])
        res = self.get_success(d)
        self.assertEqual(len(res.keys()), 2)

        # key_id_1 is unchanged...
        res1 = res[("srv1", key_id_1)]
        self.assertEqual(res1.verify_key, KEY_1)
        self.assertEqual(res1.valid_until_ts, 100)

        # ...but key_id_2 now reflects the updated key, not the cached one.
        res2 = res[("srv1", key_id_2)]
        self.assertEqual(res2.verify_key, new_key_2)
        self.assertEqual(res2.valid_until_ts, 300)
|