Mirror of https://github.com/element-hq/element-web, synced 2024-11-25 18:55:58 +03:00
Redeploy script: put each build in a separate directory (#2790)
* Redeploy script: put each build in a separate directory

Hopefully this will fix the problem whereby we can overwrite the live deployment.
parent 6d4abac016
commit c6c4aff8ae
1 changed file with 28 additions and 9 deletions
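The change is easier to follow with the resulting on-disk layout in mind. The sketch below only illustrates the naming scheme the diff introduces; the paths, job name and build number are made up, and in the real script `arg_extract_path`, `arg_symlink`, `job_name` and `build_num` come from command-line arguments and the Jenkins poke.

import os

# Made-up example values; the real ones come from CLI arguments and the
# Jenkins webhook payload.
arg_extract_path = "/srv/vector/deploys"
arg_symlink = "/srv/vector/live"   # the path the web server actually serves
job_name, build_num = "vector-web", 1234

# One directory per build, so two builds of the same git version (and hence
# the same tarball name) can never overwrite each other.
build_dir = os.path.join(arg_extract_path, "%s-#%s" % (job_name, build_num))
print(build_dir)  # /srv/vector/deploys/vector-web-#1234

# The live site is just a symlink that is repointed at the new build once it
# is fully extracted, so visitors never see half-written files.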
@@ -13,7 +13,7 @@ def download_file(url):
     local_filename = url.split('/')[-1]
     r = requests.get(url, stream=True)
     with open(local_filename, 'wb') as f:
         for chunk in r.iter_content(chunk_size=1024):
             if chunk: # filter out keep-alive new chunks
                 f.write(chunk)
     return local_filename
@@ -107,21 +107,40 @@ def on_receive_jenkins_poke():
     )
 
     print("Retrieving .tar.gz file: %s" % tar_gz_url)
 
+    # we rely on the fact that flask only serves one request at a time to
+    # ensure that we do not overwrite a tarball from a concurrent request.
     filename = download_file(tar_gz_url)
     print("Downloaded file: %s" % filename)
 
+    try:
+        # we extract into a directory based on the build number. This avoids the
+        # problem of multiple builds building the same git version and thus having
+        # the same tarball name. That would lead to two potential problems:
+        #   (a) sometimes jenkins serves corrupted artifacts; we would replace
+        #       a good deploy with a bad one
+        #   (b) we'll be overwriting the live deployment, which means people might
+        #       see half-written files.
+        build_dir = os.path.join(arg_extract_path, "%s-#%s" % (job_name, build_num))
+        if os.path.exists(build_dir):
+            abort(400, "Not deploying. We have previously deployed this build.")
+            return
+        os.mkdir(build_dir)
+
+        untar_to(filename, build_dir)
+        print("Extracted to: %s" % build_dir)
+    finally:
+        if arg_should_clean:
+            os.remove(filename)
+
     name_str = filename.replace(".tar.gz", "")
-    untar_to(filename, arg_extract_path)
-
-    extracted_dir = os.path.join(arg_extract_path, name_str)
-
-    if arg_should_clean:
-        os.remove(filename)
-
-    create_symlink(source=extracted_dir, linkname=arg_symlink)
+    extracted_dir = os.path.join(build_dir, name_str)
 
     if arg_config_location:
         create_symlink(source=arg_config_location, linkname=os.path.join(extracted_dir, 'config.json'))
 
+    create_symlink(source=extracted_dir, linkname=arg_symlink)
+
     return jsonify({})
 
 if __name__ == "__main__":
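The diff relies on `create_symlink` to point the live deployment at the freshly extracted build; its implementation is not part of this diff, so the sketch below is only an assumed illustration of the usual atomic-flip pattern (create the link under a temporary name, then rename it over the old one), not the script's actual helper.

import os

def create_symlink(source, linkname):
    # Assumed illustration: build the new link under a temporary name and
    # rename() it into place, so the web server always sees either the old
    # build or the new one, never a missing or half-made link.
    tmp_link = linkname + ".tmp"
    if os.path.lexists(tmp_link):
        os.remove(tmp_link)
    os.symlink(source, tmp_link)
    os.rename(tmp_link, linkname)  # rename() is atomic on POSIX filesystems

# Made-up paths, matching the layout sketch above.
create_symlink(source="/srv/vector/deploys/vector-web-#1234/vector",
               linkname="/srv/vector/live")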