2016-02-03 11:26:24 +00:00
|
|
|
#!/usr/bin/env python
|
2017-01-17 13:55:33 +00:00
|
|
|
#
|
2020-09-30 12:00:52 +00:00
|
|
|
# auto-deploy script for https://develop.element.io
|
2017-01-17 13:55:33 +00:00
|
|
|
#
|
2019-04-26 14:41:20 +00:00
|
|
|
# Listens for buildkite webhook pokes (https://buildkite.com/docs/apis/webhooks)
|
|
|
|
# When it gets one, downloads the artifact from buildkite
|
2017-01-17 13:55:33 +00:00
|
|
|
# and deploys it as the new version.
|
|
|
|
#
|
|
|
|
# Requires the following python packages:
|
|
|
|
#
|
|
|
|
# - requests
|
|
|
|
# - flask
|
|
|
|
#
|
2016-02-03 11:26:24 +00:00
|
|
|
from __future__ import print_function
|
2022-08-09 12:23:41 +00:00
|
|
|
import requests, argparse, os, errno
|
2017-01-17 13:55:33 +00:00
|
|
|
import time
|
2017-07-20 10:25:19 +00:00
|
|
|
import traceback
|
2017-09-20 16:22:47 +00:00
|
|
|
import glob
|
2019-04-26 14:26:03 +00:00
|
|
|
import re
|
|
|
|
import shutil
|
2019-05-02 17:05:11 +00:00
|
|
|
import threading
|
|
|
|
from Queue import Queue
|
2017-01-17 22:25:02 +00:00
|
|
|
|
2016-02-03 11:26:24 +00:00
|
|
|
from flask import Flask, jsonify, request, abort
|
2017-01-17 13:55:33 +00:00
|
|
|
|
2017-01-17 22:25:02 +00:00
|
|
|
from deploy import Deployer, DeployException
|
|
|
|
|
2016-02-03 11:26:24 +00:00
|
|
|
app = Flask(__name__)

# Mutable module-level state, filled in by the argument parsing in __main__.
deployer = None
arg_extract_path = None
arg_webhook_token = None
# BUGFIX: the rest of the file consistently uses the misspelt name
# 'arg_webbook_token' (read in on_receive_buildkite_poke, assigned in
# __main__). Without a module-level default, the name only exists when the
# file is run as a script, not when it is imported (e.g. by a WSGI server).
# The original 'arg_webhook_token' is kept above for backward compatibility.
arg_webbook_token = None
arg_api_token = None

# Deploy jobs produced by the webhook handler, consumed by worker_thread().
workQueue = Queue()
|
|
|
|
|
2016-02-03 12:00:17 +00:00
|
|
|
|
2019-04-26 14:26:03 +00:00
|
|
|
def req_headers():
    """Return the HTTP headers for authenticated buildkite API requests."""
    return {"Authorization": "Bearer %s" % (arg_api_token,)}
|
|
|
|
|
2019-05-02 17:05:11 +00:00
|
|
|
# Buildkite considers a poke to have failed if it has to wait more than 10s for
# data (any data, not just the initial response) and it normally takes longer than
# that to download an artifact from buildkite. Apparently there is no way in flask
# to finish the response and then keep doing stuff, so instead this has to involve
# threading. Sigh.
def worker_thread():
    """Consume deploy jobs from workQueue forever, deploying each in turn."""
    while True:
        job = workQueue.get()
        deploy_buildkite_artifact(*job)
|
|
|
|
|
2016-02-03 11:26:24 +00:00
|
|
|
@app.route("/", methods=["POST"])
def on_receive_buildkite_poke():
    """Handle a buildkite 'build.finished' webhook poke.

    Checks the webhook token, validates the build / pipeline fields of the
    incoming JSON, locates a dist tarball among the build's artifacts and
    queues it for deployment by the worker thread.

    Returns a JSON response, or aborts with a 400 on any validation failure.
    """
    # NOTE: 'arg_webbook_token' (sic) is the spelling actually assigned in
    # __main__, so it is deliberately preserved here.
    got_webhook_token = request.headers.get('X-Buildkite-Token')
    if got_webhook_token != arg_webbook_token:
        print("Denying request with incorrect webhook token: %s" % (got_webhook_token,))
        abort(400, "Incorrect webhook token")
        return

    # If locked down to one org, only accept builds/artifacts under its API prefix.
    required_api_prefix = None
    if arg_buildkite_org is not None:
        required_api_prefix = 'https://api.buildkite.com/v2/organizations/%s' % (arg_buildkite_org,)

    incoming_json = request.get_json()
    if not incoming_json:
        abort(400, "No JSON provided!")
        return
    print("Incoming JSON: %s" % (incoming_json,))

    event = incoming_json.get("event")
    if event is None:
        abort(400, "No 'event' specified")
        return

    if event == 'ping':
        print("Got ping request - responding")
        return jsonify({'response': 'pong!'})

    if event != 'build.finished':
        # BUGFIX: the format argument was missing, so the event name was
        # never interpolated into this log line.
        print("Rejecting '%s' event" % (event,))
        abort(400, "Unrecognised event")
        return

    build_obj = incoming_json.get("build")
    if build_obj is None:
        abort(400, "No 'build' object")
        return

    build_url = build_obj.get('url')
    if build_url is None:
        abort(400, "build has no url")
        return

    if required_api_prefix is not None and not build_url.startswith(required_api_prefix):
        print("Denying poke for build url with incorrect prefix: %s" % (build_url,))
        abort(400, "Invalid build url")
        return

    build_num = build_obj.get('number')
    if build_num is None:
        abort(400, "build has no number")
        return

    pipeline_obj = incoming_json.get("pipeline")
    if pipeline_obj is None:
        abort(400, "No 'pipeline' object")
        return

    pipeline_name = pipeline_obj.get('name')
    if pipeline_name is None:
        abort(400, "pipeline has no name")
        return

    artifacts_url = build_url + "/artifacts"
    artifacts_resp = requests.get(artifacts_url, headers=req_headers())
    artifacts_resp.raise_for_status()
    artifacts_array = artifacts_resp.json()

    # Pick the (last) artifact whose path looks like a dist tarball.
    artifact_to_deploy = None
    for artifact in artifacts_array:
        # BUGFIX: escape the dots so that e.g. 'dist/foo-targz' cannot match.
        if re.match(r"dist/.*\.tar\.gz", artifact['path']):
            artifact_to_deploy = artifact
    if artifact_to_deploy is None:
        print("No suitable artifacts found")
        return jsonify({})

    # double paranoia check: make sure the artifact is on the right org too
    if required_api_prefix is not None and not artifact_to_deploy['url'].startswith(required_api_prefix):
        print("Denying poke for build url with incorrect prefix: %s" % (artifact_to_deploy['url'],))
        # BUGFIX: abort() does not %-format extra positional arguments, so
        # interpolate the URL into the description ourselves.
        abort(400, "Refusing to deploy artifact from URL %s" % (artifact_to_deploy['url'],))
        return

    # there's no point building up a queue of things to deploy, so if there are any pending jobs,
    # remove them
    while not workQueue.empty():
        try:
            workQueue.get(False)
        except Exception:
            # queue was drained between empty() and get(); nothing to do
            pass
    workQueue.put([artifact_to_deploy, pipeline_name, build_num])

    return jsonify({})
|
2019-04-26 14:26:03 +00:00
|
|
|
|
|
|
|
def deploy_buildkite_artifact(artifact, pipeline_name, build_num):
    """Fetch the given buildkite artifact's metadata and deploy its tarball.

    Runs on the worker thread, consuming jobs queued by the webhook handler.
    """
    meta_resp = requests.get(artifact['url'], headers=req_headers())
    meta_resp.raise_for_status()
    artifact_obj = meta_resp.json()

    # we extract into a directory based on the build number. This avoids the
    # problem of multiple builds building the same git version and thus having
    # the same tarball name. That would lead to two potential problems:
    #   (a) sometimes jenkins serves corrupted artifacts; we would replace
    #       a good deploy with a bad one
    #   (b) we'll be overwriting the live deployment, which means people might
    #       see half-written files.
    build_dir = os.path.join(arg_extract_path, "%s-#%s" % (pipeline_name, build_num))
    try:
        extracted_dir = deploy_tarball(artifact_obj, build_dir)
    except DeployException as e:
        traceback.print_exc()
        # NOTE(review): abort() raises an HTTPException, but this function runs
        # on the worker thread with no flask request context — confirm that
        # killing the job with an unhandled exception here is intended.
        abort(400, e.message)
|
|
|
|
|
2017-01-17 22:25:02 +00:00
|
|
|
|
2019-04-26 14:26:03 +00:00
|
|
|
def deploy_tarball(artifact, build_dir):
    """Download a deployment tarball from buildkite and unpack it.

    Args:
        artifact: buildkite artifact object; 'download_url' and 'filename'
            keys are used here.
        build_dir: directory to extract into; deploying is refused if it
            already exists.

    Returns:
        (str) the path to the unpacked deployment

    Raises:
        DeployException: if this build has been deployed before
    """
    if os.path.exists(build_dir):
        raise DeployException(
            "Not deploying. We have previously deployed this build."
        )
    os.mkdir(build_dir)

    print("Fetching artifact %s -> %s..." % (artifact['download_url'], artifact['filename']))

    # Download the tarball here as buildkite needs auth to do this
    # we don't pgp-sign buildkite artifacts, relying on HTTPS and buildkite
    # not being evil. If that's not good enough for you, don't use develop.element.io.
    download = requests.get(artifact['download_url'], stream=True, headers=req_headers())
    download.raise_for_status()
    with open(artifact['filename'], 'wb') as tarball_file:
        shutil.copyfileobj(download.raw, tarball_file)

    print("...download complete. Deploying...")

    # we rely on the fact that flask only serves one request at a time to
    # ensure that we do not overwrite a tarball from a concurrent request.
    return deployer.deploy(artifact['filename'], build_dir)
|
2017-01-17 13:55:33 +00:00
|
|
|
|
2016-02-03 11:26:24 +00:00
|
|
|
|
|
|
|
if __name__ == "__main__":
    parser = argparse.ArgumentParser("Runs a Vector redeployment server.")
    parser.add_argument(
        "-p", "--port", dest="port", default=4000, type=int, help=(
            "The port to listen on for requests from Jenkins."
        )
    )
    parser.add_argument(
        "-e", "--extract", dest="extract", default="./extracted", help=(
            "The location to extract .tar.gz files to."
        )
    )
    parser.add_argument(
        "-b", "--bundles-dir", dest="bundles_dir", help=(
            "A directory to move the contents of the 'bundles' directory to. A \
            symlink to the bundles directory will also be written inside the \
            extracted tarball. Example: './bundles'."
        )
    )
    parser.add_argument(
        "-c", "--clean", dest="clean", action="store_true", default=False, help=(
            "Remove .tar.gz files after they have been downloaded and extracted."
        )
    )
    parser.add_argument(
        "-s", "--symlink", dest="symlink", default="./latest", help=(
            "Write a symlink to this location pointing to the extracted tarball. \
            New builds will keep overwriting this symlink. The symlink will point \
            to the /vector directory INSIDE the tarball."
        )
    )

    # --include ../../config.json ./localhost.json homepages/*
    parser.add_argument(
        # BUGFIX: with nargs='*' the default must be a *list*. The old string
        # default ('./config*.json') was iterated character-by-character when
        # --include was omitted, globbing each character — including '*',
        # which matched everything in the working directory.
        "--include", nargs='*', default=['./config*.json'], help=(
            "Symlink these files into the root of the deployed tarball. \
            Useful for config files and home pages. Supports glob syntax. \
            (Default: '%(default)s')"
        )
    )
    parser.add_argument(
        "--test", dest="tarball_uri", help=(
            "Don't start an HTTP listener. Instead download a build from Jenkins \
            immediately."
        ),
    )

    parser.add_argument(
        "--webhook-token", dest="webhook_token", help=(
            "Only accept pokes with this buildkite token."
        ), required=True,
    )

    parser.add_argument(
        "--api-token", dest="api_token", help=(
            "API access token for buildkite. Require read_artifacts scope."
        ), required=True,
    )

    # We require a matching webhook token, but because we take everything else
    # about what to deploy from the poke body, we can be a little more paranoid
    # and only accept builds / artifacts from a specific buildkite org
    parser.add_argument(
        "--org", dest="buildkite_org", help=(
            "Lock down to this buildkite org"
        )
    )

    args = parser.parse_args()
    arg_extract_path = args.extract
    # NOTE: 'arg_webbook_token' (sic) is the name the request handler reads,
    # so the misspelling is kept deliberately.
    arg_webbook_token = args.webhook_token
    arg_api_token = args.api_token
    arg_buildkite_org = args.buildkite_org

    if not os.path.isdir(arg_extract_path):
        os.mkdir(arg_extract_path)

    deployer = Deployer()
    deployer.bundles_path = args.bundles_dir
    deployer.should_clean = args.clean
    deployer.symlink_latest = args.symlink

    for include in args.include:
        deployer.symlink_paths.update({ os.path.basename(pth): pth for pth in glob.iglob(include) })

    if args.tarball_uri is not None:
        build_dir = os.path.join(arg_extract_path, "test-%i" % (time.time()))
        deploy_tarball(args.tarball_uri, build_dir)
    else:
        print(
            "Listening on port %s. Extracting to %s%s. Symlinking to %s. Include files: %s" %
            (args.port,
             arg_extract_path,
             " (clean after)" if deployer.should_clean else "",
             args.symlink,
             deployer.symlink_paths,
            )
        )
        # Run deploys on a daemon thread so slow artifact downloads don't make
        # buildkite consider the webhook poke to have timed out.
        fred = threading.Thread(target=worker_thread)
        fred.daemon = True
        fred.start()
        app.run(port=args.port, debug=False)
|