mirror of https://github.com/element-hq/synapse.git
Run black on the scripts (#9981)

Turns out these scripts weren't getting linted.

parent 5090f26b63
commit 6482075c95

8 changed files with 141 additions and 76 deletions
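Because black's recursive discovery only picks up files matching *.py, these extensionless scripts have to be named explicitly on the command line (the diff's own comment says as much). A minimal sketch of the equivalent manual invocation, using the paths added in this commit; the black version pinned by the project's lint tooling may format slightly differently:

    # format the extensionless scripts that black's discovery misses
    black \
        scripts/export_signing_key scripts/generate_config \
        scripts/generate_log_config scripts/hash_password \
        scripts/register_new_matrix_user scripts/synapse_port_db \
        scripts-dev/build_debian_packages scripts-dev/sign_json \
        scripts-dev/update_database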
changelog.d/9981.misc (new file)

@@ -0,0 +1 @@
+Run `black` on files in the `scripts` directory.
scripts-dev/build_debian_packages

@@ -21,18 +21,18 @@ DISTS = (
     "debian:buster",
     "debian:bullseye",
     "debian:sid",
-    "ubuntu:bionic", # 18.04 LTS (our EOL forced by Py36 on 2021-12-23)
-    "ubuntu:focal", # 20.04 LTS (our EOL forced by Py38 on 2024-10-14)
-    "ubuntu:groovy", # 20.10 (EOL 2021-07-07)
+    "ubuntu:bionic",  # 18.04 LTS (our EOL forced by Py36 on 2021-12-23)
+    "ubuntu:focal",  # 20.04 LTS (our EOL forced by Py38 on 2024-10-14)
+    "ubuntu:groovy",  # 20.10 (EOL 2021-07-07)
     "ubuntu:hirsute",  # 21.04 (EOL 2022-01-05)
 )

-DESC = '''\
+DESC = """\
 Builds .debs for synapse, using a Docker image for the build environment.

 By default, builds for all known distributions, but a list of distributions
 can be passed on the commandline for debugging.
-'''
+"""


 class Builder(object):

@@ -46,7 +46,7 @@ class Builder(object):
         """Build deb for a single distribution"""

         if self._failed:
-            print("not building %s due to earlier failure" % (dist, ))
+            print("not building %s due to earlier failure" % (dist,))
             raise Exception("failed")

         try:

@@ -68,48 +68,65 @@ class Builder(object):
         # we tend to get source packages which are full of debs. (We could hack
         # around that with more magic in the build_debian.sh script, but that
         # doesn't solve the problem for natively-run dpkg-buildpakage).
-        debsdir = os.path.join(projdir, '../debs')
+        debsdir = os.path.join(projdir, "../debs")
         os.makedirs(debsdir, exist_ok=True)

         if self.redirect_stdout:
-            logfile = os.path.join(debsdir, "%s.buildlog" % (tag, ))
+            logfile = os.path.join(debsdir, "%s.buildlog" % (tag,))
             print("building %s: directing output to %s" % (dist, logfile))
             stdout = open(logfile, "w")
         else:
             stdout = None

         # first build a docker image for the build environment
-        subprocess.check_call([
-            "docker", "build",
-            "--tag", "dh-venv-builder:" + tag,
-            "--build-arg", "distro=" + dist,
-            "-f", "docker/Dockerfile-dhvirtualenv",
-            "docker",
-        ], stdout=stdout, stderr=subprocess.STDOUT)
+        subprocess.check_call(
+            [
+                "docker",
+                "build",
+                "--tag",
+                "dh-venv-builder:" + tag,
+                "--build-arg",
+                "distro=" + dist,
+                "-f",
+                "docker/Dockerfile-dhvirtualenv",
+                "docker",
+            ],
+            stdout=stdout,
+            stderr=subprocess.STDOUT,
+        )

         container_name = "synapse_build_" + tag
         with self._lock:
             self.active_containers.add(container_name)

         # then run the build itself
-        subprocess.check_call([
-            "docker", "run",
-            "--rm",
-            "--name", container_name,
-            "--volume=" + projdir + ":/synapse/source:ro",
-            "--volume=" + debsdir + ":/debs",
-            "-e", "TARGET_USERID=%i" % (os.getuid(), ),
-            "-e", "TARGET_GROUPID=%i" % (os.getgid(), ),
-            "-e", "DEB_BUILD_OPTIONS=%s" % ("nocheck" if skip_tests else ""),
-            "dh-venv-builder:" + tag,
-        ], stdout=stdout, stderr=subprocess.STDOUT)
+        subprocess.check_call(
+            [
+                "docker",
+                "run",
+                "--rm",
+                "--name",
+                container_name,
+                "--volume=" + projdir + ":/synapse/source:ro",
+                "--volume=" + debsdir + ":/debs",
+                "-e",
+                "TARGET_USERID=%i" % (os.getuid(),),
+                "-e",
+                "TARGET_GROUPID=%i" % (os.getgid(),),
+                "-e",
+                "DEB_BUILD_OPTIONS=%s" % ("nocheck" if skip_tests else ""),
+                "dh-venv-builder:" + tag,
+            ],
+            stdout=stdout,
+            stderr=subprocess.STDOUT,
+        )

         with self._lock:
             self.active_containers.remove(container_name)

         if stdout is not None:
             stdout.close()
-            print("Completed build of %s" % (dist, ))
+            print("Completed build of %s" % (dist,))

     def kill_containers(self):
         with self._lock:

@@ -117,9 +134,14 @@ class Builder(object):

         for c in active:
             print("killing container %s" % (c,))
-            subprocess.run([
-                "docker", "kill", c,
-            ], stdout=subprocess.DEVNULL)
+            subprocess.run(
+                [
+                    "docker",
+                    "kill",
+                    c,
+                ],
+                stdout=subprocess.DEVNULL,
+            )
             with self._lock:
                 self.active_containers.remove(c)

@@ -130,31 +152,38 @@ def run_builds(dists, jobs=1, skip_tests=False):
     def sig(signum, _frame):
         print("Caught SIGINT")
         builder.kill_containers()

     signal.signal(signal.SIGINT, sig)

     with ThreadPoolExecutor(max_workers=jobs) as e:
         res = e.map(lambda dist: builder.run_build(dist, skip_tests), dists)

         # make sure we consume the iterable so that exceptions are raised.
-        for r in res:
+        for _ in res:
             pass


-if __name__ == '__main__':
+if __name__ == "__main__":
     parser = argparse.ArgumentParser(
         description=DESC,
     )
     parser.add_argument(
-        '-j', '--jobs', type=int, default=1,
-        help='specify the number of builds to run in parallel',
+        "-j",
+        "--jobs",
+        type=int,
+        default=1,
+        help="specify the number of builds to run in parallel",
     )
     parser.add_argument(
-        '--no-check', action='store_true',
-        help='skip running tests after building',
+        "--no-check",
+        action="store_true",
+        help="skip running tests after building",
     )
     parser.add_argument(
-        'dist', nargs='*', default=DISTS,
-        help='a list of distributions to build for. Default: %(default)s',
+        "dist",
+        nargs="*",
+        default=DISTS,
+        help="a list of distributions to build for. Default: %(default)s",
     )
     args = parser.parse_args()
     run_builds(dists=args.dist, jobs=args.jobs, skip_tests=args.no_check)
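A usage sketch derived from the argparse definitions in the hunk above (the commit changes only formatting, not behaviour):

    # build every distribution in DISTS, two builds in parallel
    scripts-dev/build_debian_packages -j 2

    # build one distribution and skip the tests
    scripts-dev/build_debian_packages --no-check debian:buster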
scripts-dev/lint.sh

@@ -80,8 +80,22 @@ else
     # then lint everything!
     if [[ -z ${files+x} ]]; then
         # Lint all source code files and directories
-        # Note: this list aims the mirror the one in tox.ini
-        files=("synapse" "docker" "tests" "scripts-dev" "scripts" "contrib" "synctl" "setup.py" "synmark" "stubs" ".buildkite")
+        # Note: this list aims to mirror the one in tox.ini
+        files=(
+            "synapse" "docker" "tests"
+            # annoyingly, black doesn't find these so we have to list them
+            "scripts/export_signing_key"
+            "scripts/generate_config"
+            "scripts/generate_log_config"
+            "scripts/hash_password"
+            "scripts/register_new_matrix_user"
+            "scripts/synapse_port_db"
+            "scripts-dev"
+            "scripts-dev/build_debian_packages"
+            "scripts-dev/sign_json"
+            "scripts-dev/update_database"
+            "contrib" "synctl" "setup.py" "synmark" "stubs" ".buildkite"
+        )
     fi
 fi
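Judging from the `${files+x}` guard, lint.sh falls back to this default list only when no paths are given, so both modes work; a sketch, not part of the diff:

    # lint the default list, now including the extensionless scripts
    ./scripts-dev/lint.sh

    # or lint only specific files
    ./scripts-dev/lint.sh scripts/hash_password scripts-dev/sign_json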
scripts/export_signing_key

@@ -30,7 +30,11 @@ def exit(status: int = 0, message: Optional[str] = None):
 def format_plain(public_key: nacl.signing.VerifyKey):
     print(
         "%s:%s %s"
-        % (public_key.alg, public_key.version, encode_verify_key_base64(public_key),)
+        % (
+            public_key.alg,
+            public_key.version,
+            encode_verify_key_base64(public_key),
+        )
     )


@@ -50,7 +54,10 @@ if __name__ == "__main__":
     parser = argparse.ArgumentParser()

     parser.add_argument(
-        "key_file", nargs="+", type=argparse.FileType("r"), help="The key file to read",
+        "key_file",
+        nargs="+",
+        type=argparse.FileType("r"),
+        help="The key file to read",
     )

     parser.add_argument(

@@ -63,7 +70,7 @@ if __name__ == "__main__":
     parser.add_argument(
         "--expiry-ts",
         type=int,
-        default=int(time.time() * 1000) + 6*3600000,
+        default=int(time.time() * 1000) + 6 * 3600000,
         help=(
             "The expiry time to use for -x, in milliseconds since 1970. The default "
             "is (now+6h)."
scripts/generate_config

@@ -11,23 +11,22 @@ if __name__ == "__main__":
     parser.add_argument(
         "--config-dir",
         default="CONFDIR",
-
         help="The path where the config files are kept. Used to create filenames for "
-             "things like the log config and the signing key. Default: %(default)s",
+        "things like the log config and the signing key. Default: %(default)s",
     )

     parser.add_argument(
         "--data-dir",
         default="DATADIR",
         help="The path where the data files are kept. Used to create filenames for "
-             "things like the database and media store. Default: %(default)s",
+        "things like the database and media store. Default: %(default)s",
     )

     parser.add_argument(
         "--server-name",
         default="SERVERNAME",
         help="The server name. Used to initialise the server_name config param, but also "
-             "used in the names of some of the config files. Default: %(default)s",
+        "used in the names of some of the config files. Default: %(default)s",
     )

     parser.add_argument(

@@ -41,21 +40,22 @@ if __name__ == "__main__":
         "--generate-secrets",
         action="store_true",
         help="Enable generation of new secrets for things like the macaroon_secret_key."
-             "By default, these parameters will be left unset."
+        "By default, these parameters will be left unset.",
     )

     parser.add_argument(
-        "-o", "--output-file",
-        type=argparse.FileType('w'),
+        "-o",
+        "--output-file",
+        type=argparse.FileType("w"),
         default=sys.stdout,
         help="File to write the configuration to. Default: stdout",
     )

     parser.add_argument(
         "--header-file",
-        type=argparse.FileType('r'),
+        type=argparse.FileType("r"),
         help="File from which to read a header, which will be printed before the "
-             "generated config.",
+        "generated config.",
     )

     args = parser.parse_args()
scripts/hash_password

@@ -41,7 +41,7 @@ if __name__ == "__main__":
     parser.add_argument(
         "-c",
         "--config",
-        type=argparse.FileType('r'),
+        type=argparse.FileType("r"),
         help=(
             "Path to server config file. "
             "Used to read in bcrypt_rounds and password_pepper."

@@ -72,8 +72,8 @@ if __name__ == "__main__":
     pw = unicodedata.normalize("NFKC", password)

     hashed = bcrypt.hashpw(
-        pw.encode('utf8') + password_pepper.encode("utf8"),
+        pw.encode("utf8") + password_pepper.encode("utf8"),
         bcrypt.gensalt(bcrypt_rounds),
-    ).decode('ascii')
+    ).decode("ascii")

     print(hashed)
scripts/synapse_port_db

@@ -294,8 +294,7 @@ class Porter(object):
         return table, already_ported, total_to_port, forward_chunk, backward_chunk

     async def get_table_constraints(self) -> Dict[str, Set[str]]:
-        """Returns a map of tables that have foreign key constraints to tables they depend on.
-        """
+        """Returns a map of tables that have foreign key constraints to tables they depend on."""

         def _get_constraints(txn):
             # We can pull the information about foreign key constraints out from

@@ -504,7 +503,9 @@ class Porter(object):
             return

     def build_db_store(
-        self, db_config: DatabaseConnectionConfig, allow_outdated_version: bool = False,
+        self,
+        db_config: DatabaseConnectionConfig,
+        allow_outdated_version: bool = False,
     ):
         """Builds and returns a database store using the provided configuration.

@@ -740,7 +741,7 @@ class Porter(object):
                 return col

         outrows = []
-        for i, row in enumerate(rows):
+        for row in rows:
             try:
                 outrows.append(
                     tuple(conv(j, col) for j, col in enumerate(row) if j > 0)

@@ -890,8 +891,7 @@ class Porter(object):
         await self.postgres_store.db_pool.runInteraction("setup_user_id_seq", r)

     async def _setup_events_stream_seqs(self) -> None:
-        """Set the event stream sequences to the correct values.
-        """
+        """Set the event stream sequences to the correct values."""

         # We get called before we've ported the events table, so we need to
         # fetch the current positions from the SQLite store.

@@ -920,12 +920,14 @@ class Porter(object):
         )

         await self.postgres_store.db_pool.runInteraction(
-            "_setup_events_stream_seqs", _setup_events_stream_seqs_set_pos,
+            "_setup_events_stream_seqs",
+            _setup_events_stream_seqs_set_pos,
         )

-    async def _setup_sequence(self, sequence_name: str, stream_id_tables: Iterable[str]) -> None:
-        """Set a sequence to the correct value.
-        """
+    async def _setup_sequence(
+        self, sequence_name: str, stream_id_tables: Iterable[str]
+    ) -> None:
+        """Set a sequence to the correct value."""
         current_stream_ids = []
         for stream_id_table in stream_id_tables:
             max_stream_id = await self.sqlite_store.db_pool.simple_select_one_onecol(

@@ -939,14 +941,19 @@
         next_id = max(current_stream_ids) + 1

         def r(txn):
-            sql = "ALTER SEQUENCE %s RESTART WITH" % (sequence_name, )
-            txn.execute(sql + " %s", (next_id, ))
+            sql = "ALTER SEQUENCE %s RESTART WITH" % (sequence_name,)
+            txn.execute(sql + " %s", (next_id,))

-        await self.postgres_store.db_pool.runInteraction("_setup_%s" % (sequence_name,), r)
+        await self.postgres_store.db_pool.runInteraction(
+            "_setup_%s" % (sequence_name,), r
+        )

     async def _setup_auth_chain_sequence(self) -> None:
         curr_chain_id = await self.sqlite_store.db_pool.simple_select_one_onecol(
-            table="event_auth_chains", keyvalues={}, retcol="MAX(chain_id)", allow_none=True
+            table="event_auth_chains",
+            keyvalues={},
+            retcol="MAX(chain_id)",
+            allow_none=True,
         )

         def r(txn):

@@ -968,8 +975,7 @@ class Porter(object):


 class Progress(object):
-    """Used to report progress of the port
-    """
+    """Used to report progress of the port"""

     def __init__(self):
         self.tables = {}

@@ -994,8 +1000,7 @@ class Progress(object):


 class CursesProgress(Progress):
-    """Reports progress to a curses window
-    """
+    """Reports progress to a curses window"""

     def __init__(self, stdscr):
         self.stdscr = stdscr

@@ -1020,7 +1025,7 @@ class CursesProgress(Progress):

         self.total_processed = 0
         self.total_remaining = 0
-        for table, data in self.tables.items():
+        for data in self.tables.values():
             self.total_processed += data["num_done"] - data["start"]
             self.total_remaining += data["total"] - data["num_done"]

@@ -1111,8 +1116,7 @@


 class TerminalProgress(Progress):
-    """Just prints progress to the terminal
-    """
+    """Just prints progress to the terminal"""

     def update(self, table, num_done):
         super(TerminalProgress, self).update(table, num_done)
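To verify a freshly formatted file such as scripts/synapse_port_db without rewriting it, black's standard check mode can be used; illustrative, not part of the commit:

    # exit non-zero and print a diff if the file is not black-clean
    black --check --diff scripts/synapse_port_db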
tox.ini

@@ -34,7 +34,17 @@ lint_targets =
     synapse
     tests
     scripts
+    # annoyingly, black doesn't find these so we have to list them
+    scripts/export_signing_key
+    scripts/generate_config
+    scripts/generate_log_config
+    scripts/hash_password
+    scripts/register_new_matrix_user
+    scripts/synapse_port_db
     scripts-dev
+    scripts-dev/build_debian_packages
+    scripts-dev/sign_json
+    scripts-dev/update_database
     stubs
     contrib
     synctl
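Assuming lint_targets feeds the repository's tox lint environment, as the section name suggests, the expanded list means these scripts are now covered by the usual run:

    # run the linters over all lint_targets
    tox -e lint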