D6922: ci: implement a "try server"

indygreg (Gregory Szorc) phabricator at mercurial-scm.org
Sat Oct 5 18:37:02 UTC 2019


indygreg updated this revision to Diff 16853.

CHANGES SINCE LAST UPDATE
  https://phab.mercurial-scm.org/D6922?vs=16717&id=16853

CHANGES SINCE LAST ACTION
  https://phab.mercurial-scm.org/D6922/new/

REVISION DETAIL
  https://phab.mercurial-scm.org/D6922

AFFECTED FILES
  contrib/ci/README.rst
  contrib/ci/lambda_functions/ci.py
  contrib/ci/lambda_functions/web.py
  contrib/ci/requirements.txt
  contrib/ci/requirements.txt.in
  contrib/ci/terraform/iam_users.tf
  contrib/ci/terraform/repo_change_reactor.tf
  contrib/ci/terraform/storage.tf
  contrib/ci/terraform/try_server.tf
  contrib/ci/terraform/user_account/main.tf
  contrib/ci/terraform/worker.tf
  tests/test-check-code.t

CHANGE DETAILS

diff --git a/tests/test-check-code.t b/tests/test-check-code.t
--- a/tests/test-check-code.t
+++ b/tests/test-check-code.t
@@ -20,6 +20,8 @@
   Skipping contrib/automation/hgautomation/ssh.py it has no-che?k-code (glob)
   Skipping contrib/automation/hgautomation/windows.py it has no-che?k-code (glob)
   Skipping contrib/automation/hgautomation/winrm.py it has no-che?k-code (glob)
+  Skipping contrib/ci/hgci/cli.py it has no-che?k-code (glob)
+  Skipping contrib/ci/hgci/try_server.py it has no-che?k-code (glob)
   Skipping contrib/ci/lambda_functions/ci.py it has no-che?k-code (glob)
   Skipping contrib/ci/lambda_functions/web.py it has no-che?k-code (glob)
   Skipping contrib/packaging/hgpackaging/downloads.py it has no-che?k-code (glob)
diff --git a/contrib/ci/terraform/worker.tf b/contrib/ci/terraform/worker.tf
--- a/contrib/ci/terraform/worker.tf
+++ b/contrib/ci/terraform/worker.tf
@@ -13,6 +13,12 @@
 }
 
 data "aws_iam_policy_document" "ci_worker" {
+  # Allow CI worker EC2 instances to fetch try bundles from S3.
+  statement {
+    effect = "Allow"
+    actions = ["s3:GetObject"]
+    resources = ["${aws_s3_bucket.mercurial_try_bundles.arn}/*"]
+  }
   # Allow CI worker EC2 instances to write artifacts to S3.
   statement {
     effect = "Allow"
diff --git a/contrib/ci/terraform/user_account/main.tf b/contrib/ci/terraform/user_account/main.tf
new file mode 100644
--- /dev/null
+++ b/contrib/ci/terraform/user_account/main.tf
@@ -0,0 +1,100 @@
+variable "pgp_key" {
+  default = <<EOF
+mQINBF2XVggBEAC105QG5tUX2fbiNaK1jq6s1iTp0py5E4qVFD+yxxiJib/ZdlTz
+wEIbQN5CV6gRVIzmviym+BniqTGYzfbtMOet/mQ9Kd0h3M0R/CTfHjI+m9LQZfjM
+ZuCpLhNaG6su61d28f7mlTEJh9PpfMOSoWq7JK/ansvIHlzYCKbjS3ESjNsl1eN9
+80eaUZqdKrtrFLrRiJSMMs1lR9wVFCe02S/ltAS8PMrhVmkmtbKzTgti5cdbRBgd
+nJlZ9l5YBW+XC+fSfwqmODEb8qz7zxxeVAXouBl+89P2HKX7767LwyX+leYDn56Z
+Hk5P0XGEh/MfCNGIXDN6uKI51gU/nD4JGFa9vV8sXdBbAeJZcxz9tQj2h76j4P1F
+FXbx66wXCEJPO3sOykEXUCk7PXaJ47Pfuhs2uhSVIDYXxF8VMUKDzYNicyH8DJNv
+VDlbc02bVTieaFZe20n7MPHWisNwU5osC1Al/F2QzMMEicfdRuuJdtrr4CRQIk6C
+AKyNuxbPdk6InzCpBGKAy5wtCT4M2XdN+93BdItQP7as8sMiuWzMP8o8zsEyKT4X
++PJ8BagNMjAWybleyjNIAPPwbz0farcsuNCrZOF1wFQZ+b6MTgNWcEnhpPw+1Qvx
+lxI32tutkl69ZUXN59ir0sSgWqltVVHvfG7GtHSiSfU/DRNGb0PmXK7JmwARAQAB
+tCdHcmVnb3J5IFN6b3JjIDxncmVnb3J5LnN6b3JjQGdtYWlsLmNvbT6JAk4EEwEK
+ADgWIQQknzIO3OjuC9nk35un3X50ZGrW7wUCXZdWCAIbAwULCQgHAgYVCgkICwIE
+FgIDAQIeAQIXgAAKCRCn3X50ZGrW703eD/0Tq8/YJztMtz9tLZ3Mju39+bZaONpI
+dQUn4zGxHLuJRWDvhNocEhlAeEWfBJe5mBohlJDytnxA61Nf7CtmA/fghP7P243x
+dgzRW6NQTSjgsMGJ/gJHHh0+iV2X/Ry7trl1Hg39Oww1e0xr2RcrXUqjMT5hUVNZ
+4fMd97HyOx1Gkenx1a3ckNALPQG4Yi/41Hz5MkIuZv5CQD8V0/hMbS1rQ2modQXq
+cAZt4K3E6NFRVRYo4eOGdXlbW0eGp+bHEbkTqJojVmq/itzLNuY0AwvfkBHs3yi2
+9SYR/+LSiq2WrGw/av7vncNzmKL1l+DmiEExvxl6XXsTzpdO0P4V+vYTeOyjm1Hc
+ySMWsHlp3i09BRtAq7Us1wVnK7c/5g/+swfBQM5Pf8bjpk5wvYmgUEFgiBUZXt/M
+HikQVRBhvs+sz95kFDJ9ZKfp/ZGZu6kVv3l0qa5aUqDlb6/lMG/ZPoVFULv0hmRm
+xW2n5HyGGSCizPq57EOGx0/nWPEJxMIpoEnit65hDYhtzujVPDdunFwCHRWCAKsS
+XKorMUxJo+5Wz2gsaILFxsM9E2jWH5ZBPEbQvTQ4bKvgXaAqceUjzIts4tA3FI2E
+5X2Pqtkd8n0h9ynBqRQtWqrqTaeKtMdB34LDuSnkqG1akuTAUr1z4oz4VtuOQjp7
+uvI/sL6EqIbbZLkCDQRdl1YIARAAm27yGMfRRk5hmd2K+DXMWCN33jVtFxM2UYwx
+VpsFXyS8uLZM/AcW7dk9zK3c4ZfD2ilUxpxzXEGoCNuBAjsv2dVJagG2Tq+5l8Cr
+KKS+bbFubM2xwbSTOYZcUJujjPGzQS+nZq68pal3l+gV2cTuqwSK8qkK21g/U79g
+qDQL7kNtx25gxuTCEpwvhq+OpimyKGhbSDt3fRUKgIQz0n6idS9irvv4M44n/avl
+slT+dfwjinDiZvu+jS1mvO2fQbI9wfcWSldR3Kvi9a4fEJGCT3fhpYmZiHy58t3y
+oDVfv6BROhcC7+grmi3Vf1LX8Hn1oYD/v9l45HEBzu5grEEufkUo1y5mdKBVpump
+HsQJmeCi+jaKkrsI1/6Hgv05yG11IBdRj4oS8aoZd51sXw3nAu4F5isoC7zMphGC
+WeZs5oMQI/lwCGW6PBID2eHGNBxDXas9SddSluktMOO8E1wJ0tVbM4UJhDXog5Nd
+CjrgCaceUg3R/rztOB9uji8ECRH8sLKEGh03+rOKl37lnQ69uszVVNHhLlovFZjW
+QDyXpqpcqVxYYnYcuv4bK4qMUCXKVEqkNuoCF/9LQels4GwENnj/d0xYplw3Csla
+Zoc1qpkdIPERSVnYHNUo4t27Uxum67H4NNezSfTX6o/dDYCD9iR/IU8saZo67NrW
+x4J+ny8AEQEAAYkCNgQYAQoAIBYhBCSfMg7c6O4L2eTfm6fdfnRkatbvBQJdl1YI
+AhsMAAoJEKfdfnRkatbvUlkP/juSplmiEALQmQSnZAxfhuuehXlll4ln8gcahe8z
+3NiwgNOk/4AnIC2fP4lQgi/+cHGEmQyS7TVYWAiATMizsTxtNOTCrzRdLYHC/pTO
+laXsKwUB1CeM4C2VnPUYqJ1+R8BYF8BNB9RpGENXRtZCjkPu0Pwf286UQeou6o0Y
+VtIndrIVaUHjnpTjAoz/iC2Nk8/t/78to/JDTnX5rdNaL6p03JQbsv87gcugpQBF
+885FfDsTU47R6IubG7o5Qy65MrlEdw5y2hZdC1Pkvez6ttei4eRPguGQqM4nosGp
+JW31EHMQjSHyF27pQ79jzGpIo1zQdb4VNISFiRL6JgZq59paKQBTonC3ScZRplre
+loM5Qbt4sF1kFVKqoFx1J4MAid1kxzEmqXnheeU8jwKWymiqKAExJlT3DgXtKtjy
+9AzZE+wP3yZyV2edFmsD9t0p/spl2S9LZFKY4KAgSAz/JSwp+JJLxcOEptO/jwel
+sLa3if5JAy/Bubqz0nBWWXgRnkTjcdLPpOWpJ7gCwavgHzcqVhDKUa3pRXUE13SO
+g8BQXkLhlPf7PbXtoiUfHbtNXOWYk3MEBIsH0ZWts46VC7ONE/xNwhoEmXYx4gnS
+6DLj0UU5eOUslzWdy/0WuTgBEnpfSh3VR7obXQO1uXX4o/rafVpjbqZYW7rKoQWZ
+r3vA
+EOF
+}
+
+variable "username" {
+  type = string
+}
+
+variable "access_to_try" {
+  type = bool
+  default = true
+}
+
+resource "aws_iam_user" "user" {
+  name = var.username
+}
+
+output "username" {
+  value = aws_iam_user.user.name
+}
+
+resource "aws_iam_user_login_profile" "login_profile" {
+  user = aws_iam_user.user.name
+  pgp_key = var.pgp_key
+}
+
+data "aws_iam_policy_document" "user_policy" {
+  statement {
+    effect = "Allow"
+    actions = [
+      "iam:ChangePassword",
+      "iam:CreateAccessKey",
+      "iam:DeleteAccessKey",
+      "iam:ListAccessKeys",
+      "iam:UpdateAccessKey",
+    ]
+    resources = [aws_iam_user.user.arn]
+  }
+}
+
+resource "aws_iam_user_policy" "user_policy" {
+  user = aws_iam_user.user.name
+  name = aws_iam_user.user.name
+  policy = data.aws_iam_policy_document.user_policy.json
+}
+
+resource "aws_iam_user_group_membership" "try_membership" {
+  count = var.access_to_try ? 1 : 0
+  user = aws_iam_user.user.name
+  groups = ["access-to-try-upload"]
+}
diff --git a/contrib/ci/terraform/try_server.tf b/contrib/ci/terraform/try_server.tf
new file mode 100644
--- /dev/null
+++ b/contrib/ci/terraform/try_server.tf
@@ -0,0 +1,108 @@
+# Holds user-uploaded Mercurial bundles to trigger CI on.
+resource "aws_s3_bucket" "mercurial_try_bundles" {
+  bucket = "mercurial-try-bundles"
+  region = "us-west-2"
+  acl = "private"
+
+  lifecycle_rule {
+    id = "Purge old objects"
+    enabled = true
+    expiration {
+      days = 30
+    }
+  }
+}
+
+# Logging for Lambda function.
+resource "aws_cloudwatch_log_group" "lambda_ci_try_server_upload" {
+  name = "/aws/lambda/${aws_lambda_function.ci_try_server_upload.function_name}"
+  # Longer than other log groups for auditing purposes.
+  retention_in_days = 30
+}
+
+# Role for Lambda function.
+resource "aws_iam_role" "lambda_ci_try_server_upload" {
+  name = "lambda-ci-try-server-upload"
+  description = "For Lambda function handling Try server uploads"
+  assume_role_policy = data.aws_iam_policy_document.assume_role_lambda.json
+}
+
+# The Lambda function which handles bundle upload and triggering jobs.
+resource "aws_lambda_function" "ci_try_server_upload" {
+  function_name = "ci-try-server-upload"
+  description = "Handles uploading of bundles to the Try server"
+  filename = data.archive_file.lambda_ci.output_path
+  handler = "ci.handle_try_server_upload"
+  source_code_hash = data.archive_file.lambda_ci.output_base64sha256
+  runtime = "python3.7"
+  timeout = 60
+  role = aws_iam_role.lambda_ci_try_server_upload.arn
+  environment {
+    variables = {
+      DYNAMODB_PUSH_TABLE = aws_dynamodb_table.ci_push.name
+      S3_TRY_BUNDLES_BUCKET = aws_s3_bucket.mercurial_try_bundles.bucket
+      WEB_URL = "https://${aws_api_gateway_domain_name.ci_web.domain_name}/"
+    }
+  }
+}
+
+data "aws_iam_policy_document" "ci_try_server_upload" {
+  # Allow Lambda function to write CloudWatch events.
+  statement {
+    effect = "Allow"
+    actions = [
+      "logs:CreateLogGroup",
+      "logs:CreateLogStream",
+      "logs:PutLogEvents",
+    ]
+    resources = [aws_cloudwatch_log_group.lambda_ci_try_server_upload.arn]
+  }
+  # Enable writing bundle to S3 bucket.
+  statement {
+    effect = "Allow"
+    actions = [
+      "s3:PutObject",
+      "s3:PutObjectAcl",
+    ]
+    resources = ["${aws_s3_bucket.mercurial_try_bundles.arn}/*"]
+  }
+  # Enable querying and inserting pushes into DynamoDB.
+  statement {
+    effect = "Allow"
+    actions = [
+      "dynamodb:PutItem",
+      "dynamodb:Query",
+    ]
+    resources = [
+      aws_dynamodb_table.ci_push.arn,
+      "${aws_dynamodb_table.ci_push.arn}/*",
+    ]
+  }
+}
+
+resource "aws_iam_role_policy" "lambda_ci_try_server_upload" {
+  role = aws_iam_role.lambda_ci_try_server_upload.name
+  name = aws_iam_role.lambda_ci_try_server_upload.name
+  policy = data.aws_iam_policy_document.ci_try_server_upload.json
+}
+
+# An IAM group where membership will grant permission to invoke
+# Lambda function.
+resource "aws_iam_group" "access_to_try_upload" {
+  name = "access-to-try-upload"
+}
+
+data "aws_iam_policy_document" "access_to_try_upload" {
+  # Allow invoking the try upload Lambda function.
+  statement {
+    effect = "Allow"
+    actions = ["lambda:InvokeFunction"]
+    resources = [aws_lambda_function.ci_try_server_upload.arn]
+  }
+}
+
+resource "aws_iam_group_policy" "access_to_try_upload" {
+  name = aws_iam_group.access_to_try_upload.name
+  group = aws_iam_group.access_to_try_upload.name
+  policy = data.aws_iam_policy_document.access_to_try_upload.json
+}
diff --git a/contrib/ci/terraform/storage.tf b/contrib/ci/terraform/storage.tf
--- a/contrib/ci/terraform/storage.tf
+++ b/contrib/ci/terraform/storage.tf
@@ -25,10 +25,23 @@
     name = "push_id"
     type = "S"
   }
+  attribute {
+    name = "node"
+    type = "S"
+  }
 
   hash_key = "repo"
   range_key = "push_id"
 
+  # This allows us to easily query for whether a push already exists for
+  # a specific node in a repository.
+  global_secondary_index {
+    name = "repo-node"
+    hash_key = "repo"
+    range_key = "node"
+    projection_type = "KEYS_ONLY"
+  }
+
   stream_enabled = true
   stream_view_type = "NEW_AND_OLD_IMAGES"
 }
diff --git a/contrib/ci/terraform/repo_change_reactor.tf b/contrib/ci/terraform/repo_change_reactor.tf
--- a/contrib/ci/terraform/repo_change_reactor.tf
+++ b/contrib/ci/terraform/repo_change_reactor.tf
@@ -71,7 +71,6 @@
     ]
     resources = ["*"]
   }
-  # Allow querying and recording job state in DynamoDB.
   statement {
     effect = "Allow"
     actions = [
diff --git a/contrib/ci/terraform/iam_users.tf b/contrib/ci/terraform/iam_users.tf
new file mode 100644
--- /dev/null
+++ b/contrib/ci/terraform/iam_users.tf
@@ -0,0 +1,31 @@
+# This file defines AWS account users.
+
+module "user_alphare" {
+  source = "./user_account"
+  username = "alphare"
+}
+
+module "user_augie" {
+  source = "./user_account"
+  username = "augie"
+}
+
+module "user_gps" {
+  source = "./user_account"
+  username = "gps"
+}
+
+module "user_gracinet" {
+  source = "./user_account"
+  username = "gracinet"
+}
+
+module "user_lothiraldan" {
+  source = "./user_account"
+  username = "lothiraldan"
+}
+
+module "user_marmoute" {
+  source = "./user_account"
+  username = "marmoute"
+}
diff --git a/contrib/ci/requirements.txt.in b/contrib/ci/requirements.txt.in
new file mode 100644
--- /dev/null
+++ b/contrib/ci/requirements.txt.in
@@ -0,0 +1 @@
+boto3
diff --git a/contrib/ci/requirements.txt b/contrib/ci/requirements.txt
new file mode 100644
--- /dev/null
+++ b/contrib/ci/requirements.txt
@@ -0,0 +1,38 @@
+#
+# This file is autogenerated by pip-compile
+# To update, run:
+#
+#    pip-compile --generate-hashes --output-file=contrib/ci/requirements.txt contrib/ci/requirements.txt.in
+#
+boto3==1.9.238 \
+    --hash=sha256:2fc1c407a5ab08cfcf54eb4171d85c523bd27019ab890de257d018af2770f71d \
+    --hash=sha256:c215cf2c8e5e7b28ae7544b1cbdbc3216bef983d7adb8b701a64f9b893e0320b
+botocore==1.12.238 \
+    --hash=sha256:1ca993f0dc70591e0fca6cf3837ee9be52fd4fbbf1aa96ba1d4a860b41f676b7 \
+    --hash=sha256:6ec3297b87d3e2c4d88b009f91061aaecdb2ceef6d9be9386394571353909adb \
+    # via boto3, s3transfer
+docutils==0.15.2 \
+    --hash=sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0 \
+    --hash=sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827 \
+    --hash=sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99 \
+    # via botocore
+jmespath==0.9.4 \
+    --hash=sha256:3720a4b1bd659dd2eecad0666459b9788813e032b83e7ba58578e48254e0a0e6 \
+    --hash=sha256:bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c \
+    # via boto3, botocore
+python-dateutil==2.8.0 \
+    --hash=sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb \
+    --hash=sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e \
+    # via botocore
+s3transfer==0.2.1 \
+    --hash=sha256:6efc926738a3cd576c2a79725fed9afde92378aa5c6a957e3af010cb019fac9d \
+    --hash=sha256:b780f2411b824cb541dbcd2c713d0cb61c7d1bcadae204cdddda2b35cef493ba \
+    # via boto3
+six==1.12.0 \
+    --hash=sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c \
+    --hash=sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73 \
+    # via python-dateutil
+urllib3==1.25.6 \
+    --hash=sha256:3de946ffbed6e6746608990594d08faac602528ac7015ac28d33cee6a45b7398 \
+    --hash=sha256:9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86 \
+    # via botocore
diff --git a/contrib/ci/lambda_functions/web.py b/contrib/ci/lambda_functions/web.py
--- a/contrib/ci/lambda_functions/web.py
+++ b/contrib/ci/lambda_functions/web.py
@@ -66,11 +66,21 @@
         '</style>',
     ]
 
-    for repo_entry in repo_poll_table.scan(Select='ALL_ATTRIBUTES')['Items']:
+    repos = repo_poll_table.scan(Select='ALL_ATTRIBUTES')['Items']
+
+    # Try repository is virtual. Add it manually.
+    repos.append({
+        'repo': 'try',
+    })
+
+    for repo_entry in repos:
         repo_name = repo_entry['repo']
-        repo_url = repo_entry['repo_url']
+        repo_url = repo_entry.get('repo_url', None)
 
-        html.append('<h1><a href="%s">%s</a></h1>' % (e(repo_url, quote=True), e(repo_name)))
+        if repo_url:
+            html.append('<h1><a href="%s">%s</a></h1>' % (e(repo_url, quote=True), e(repo_name)))
+        else:
+            html.append('<h1>%s</h1>' % e(repo_name))
 
         res = push_table.query(
             KeyConditionExpression=Key('repo').eq(repo_name),
@@ -401,9 +411,14 @@
 def push_info(push, repo_url):
     cset_url = '%s/rev/%s' % (repo_url, push['node'])
 
+    if repo_url:
+        cset_entry = '<a href="%s">%s</a>' % (
+            e(cset_url, quote=True), e(push['node']))
+    else:
+        cset_entry = e(push['node'])
+
     return ''.join([
-        '<h2>Changeset <span class="mono"><a href="%s">%s</a></span></h2>' % (
-            e(cset_url, quote=True), e(push['node'])),
+        '<h2>Changeset <span class="mono">%s</span></h2>' % cset_entry,
         '<p>branch: <span class="mono">%s</span></p>' % e(push['branch']),
         '<p>author: <span class="mono">%s</span></p>' % e(push['user']),
         '<p>description: <span class="mono">%s</span></p>' % e(push['message'].splitlines()[0]),
diff --git a/contrib/ci/lambda_functions/ci.py b/contrib/ci/lambda_functions/ci.py
--- a/contrib/ci/lambda_functions/ci.py
+++ b/contrib/ci/lambda_functions/ci.py
@@ -13,8 +13,12 @@
 import os
 import time
 import urllib.request
+import uuid
 
 import boto3
+from boto3.dynamodb.conditions import (
+    Key,
+)
 
 
 def handle_poll_repo(event, context):
@@ -40,7 +44,11 @@
             continue
 
         record = record['dynamodb']['NewImage']
-        schedule_ci(record['repo_url']['S'], record['repo']['S'], record['node']['S'])
+
+        bundle_url = record['bundle_url']['S'] if 'bundle_url' in record else None
+
+        schedule_ci(record['repo_url']['S'], record['repo']['S'],
+                    record['node']['S'], bundle_url=bundle_url)
 
 
 def handle_pending_job(event, context):
@@ -99,6 +107,42 @@
     react_to_instance_state_change(job_table, instance, state)
 
 
+def handle_try_server_upload(event, context):
+    """Handler for receiving a Try Server upload request.
+
+    This is invoked by end-users to upload a bundle and trigger jobs on it.
+    """
+    # Since the function is invoked directly, any print()ed output can be
+    # seen by the end-user. So be careful about what is logged.
+    messages = []
+
+    for a in ('bundle', 'node', 'branch', 'user', 'message'):
+        if a not in event:
+            messages.append('error: missing argument "%s"' % a)
+
+    if messages:
+        return messages
+
+    s3 = boto3.resource('s3')
+    dynamodb = boto3.resource('dynamodb')
+
+    bundle_bucket = s3.Bucket(os.environ['S3_TRY_BUNDLES_BUCKET'])
+    push_table = dynamodb.Table(os.environ['DYNAMODB_PUSH_TABLE'])
+
+    return process_try_bundle(
+        bundle_bucket,
+        push_table,
+        base64.b64decode(event['bundle']),
+        event['node'],
+        event['branch'],
+        event['user'],
+        event['message'],
+        os.environ['WEB_URL'],
+    )
+
+    return messages
+
+
 def next_build_number(job_table, repo, node, job_name):
     """Find the next available build number for a job given its unique name."""
 
@@ -190,8 +234,9 @@
     poll_table.put_item(Item=new_state)
 
 
-def schedule_ci(repo_url, repo, node):
-    print('scheduling CI for revision %s on %s' % (node, repo_url))
+def schedule_ci(repo_url, repo, node, bundle_url=None):
+    """Schedule CI from a record in the push table."""
+    print('scheduling CI for revision %s on %s' % (node, repo))
     dynamodb = boto3.resource('dynamodb')
     ec2 = boto3.resource('ec2')
     s3 = boto3.resource('s3')
@@ -203,15 +248,25 @@
 
     # TODO we should build AMIs using in-repo code so all jobs are using an
     # appropriate AMI for the revision.
+    # TODO we should store metadata in the push record that allows specifying
+    # which jobs to run.
     for image in ec2.images.filter(Owners=['self']):
         if image.name == 'hg-linux-dev-debian9':
-            schedule_linux_ci(job_table, sqs, sqs_url, bucket, repo_url, repo, node, image, 'debian9')
+            schedule_linux_ci(job_table, sqs, sqs_url, bucket, repo_url, repo,
+                              node, image, 'debian9',
+                              bundle_url=bundle_url)
         elif image.name == 'hg-linux-dev-debian10':
-            schedule_linux_ci(job_table, sqs, sqs_url, bucket, repo_url, repo, node, image, 'debian10')
+            schedule_linux_ci(job_table, sqs, sqs_url, bucket, repo_url, repo,
+                              node, image, 'debian10',
+                              bundle_url=bundle_url)
         elif image.name == 'hg-linux-dev-ubuntu18.04':
-            schedule_linux_ci(job_table, sqs, sqs_url, bucket, repo_url, repo, node, image, 'ubuntu18.04')
+            schedule_linux_ci(job_table, sqs, sqs_url, bucket, repo_url, repo,
+                              node, image, 'ubuntu18.04',
+                              bundle_url=bundle_url)
         elif image.name == 'hg-linux-dev-ubuntu19.04':
-            schedule_linux_ci(job_table, sqs, sqs_url, bucket, repo_url, repo, node, image, 'ubuntu19.04')
+            schedule_linux_ci(job_table, sqs, sqs_url, bucket, repo_url, repo,
+                              node, image, 'ubuntu19.04',
+                              bundle_url=bundle_url)
 
 
 RUN_TESTS_LINUX = '''
@@ -219,20 +274,38 @@
 
 HG=/hgdev/venv-bootstrap/bin/hg
 
+REPO_URL=$1
+NODE=$2
+PYTHON=$3
+S3_URL=$4
+BUNDLE_URL=$5
+
 cd /hgwork/src
 
-${HG} pull -r $2 $1
-${HG} log -r $2
-${HG} up $2
+if [ -z "${BUNDLE_URL}" ]; then
+  echo "pulling $NODE from $REPO_URL"
+  ${HG} pull -r $NODE $REPO_URL
+else
+  echo "pulling $REPO_URL"
+  ${HG} pull $REPO_URL
+  echo "fetching bundle from $BUNDLE_URL"
+  aws s3 cp $BUNDLE_URL bundle.hg
+  echo "applying bundle"
+  ${HG} unbundle bundle.hg
+fi
+
+# Bail immediately if we can't find the requested revision.
+${HG} log -r $NODE || exit 1
+${HG} up $NODE || exit 1
 
 export TMPDIR=/hgwork/tmp
 cd tests
-time $3 ./run-tests.py --json 2>&1 | tee output.log
+time $PYTHON ./run-tests.py --json 2>&1 | tee output.log
 
-aws s3 cp --content-type text/plain --acl public-read output.log $4/output.log
+aws s3 cp --content-type text/plain --acl public-read output.log $S3_URL/output.log
 # The JSON file has a prefix to allow loading in web browsers.
 tail -c +13 report.json > report-truncated.json
-aws s3 cp --content-type application/json --acl public-read report-truncated.json $4/report.json
+aws s3 cp --content-type application/json --acl public-read report-truncated.json $S3_URL/report.json
 '''.lstrip()
 
 
@@ -248,6 +321,7 @@
 # TAG build_number {build_number}
 # TAG s3_bucket {s3_bucket}
 # TAG s3_prefix {s3_prefix}
+# TAG bundle_url {bundle_url}
 
 repo_update: false
 repo_upgrade: false
@@ -264,7 +338,7 @@
     - mkdir /hgwork/tmp
     - chown -R hg:hg /hgwork
     - sudo -u hg -g hg rsync -a /hgdev/src /hgwork/
-    - sudo -u hg -g hg /run-tests-linux {repo_url} {node} {python} s3://{s3_bucket}/{s3_prefix} 2>&1 | tee /ci.log
+    - sudo -u hg -g hg /run-tests-linux {repo_url} {node} {python} s3://{s3_bucket}/{s3_prefix} '{bundle_url}' 2>&1 | tee /ci.log
     - aws s3 cp --content-type text/plain --acl public-read /ci.log s3://{s3_bucket}/{s3_prefix}/ci.log
     - echo done > done
     - aws s3 cp --content-type text/plain --acl public-read done s3://{s3_bucket}/{s3_prefix}/done
@@ -276,7 +350,8 @@
 '''.lstrip()
 
 
-def schedule_linux_ci(job_table, sqs, sqs_url, bucket, repo_url, repo, node, image, os_prefix):
+def schedule_linux_ci(job_table, sqs, sqs_url, bucket, repo_url, repo,
+                      node, image, os_prefix, bundle_url=None):
     block_device_mappings = [
         {
             'DeviceName': image.block_device_mappings[0]['DeviceName'],
@@ -322,6 +397,7 @@
             run_tests_linux_b64=run_tests_linux_b64,
             s3_bucket=bucket.name,
             s3_prefix=bucket_prefix,
+            bundle_url=bundle_url or '',
         )
 
         config = {
@@ -403,7 +479,14 @@
                 continue
 
             kv = line[len('# TAG '):].strip()
-            name, value = kv.split(' ', 1)
+            parts = kv.split(' ', 1)
+
+            # Ignore empty values.
+            if len(parts) == 1:
+                continue
+
+            name, value = parts
+
             tags[name] = value
             set_tags.append({
                 'Key': name,
@@ -564,3 +647,62 @@
                     v[kk] = decimal.Decimal(v[kk])
 
             batch.put_item(Item=v)
+
+
+def process_try_bundle(bundle_bucket, push_table, bundle, node, branch, user,
+                       message, web_url):
+    """Process an incoming Try bundle."""
+    messages = []
+
+    now = datetime.datetime.utcnow().isoformat()
+
+    messages.append('processing bundle of size %d bytes' % len(bundle))
+
+    # Verify we don't already have a Try push for this node. We may need
+    # to reconsider this logic once we can cherry-pick which jobs get
+    # scheduled...
+    res = push_table.query(
+        IndexName='repo-node',
+        KeyConditionExpression=Key('repo').eq('try') & Key('node').eq(node),
+        Select='ALL_PROJECTED_ATTRIBUTES',
+    )
+    if res.get('Items'):
+        messages.append('%s previously submitted; did you forget to amend?' % node)
+        messages.append('rejecting Try submission')
+        return messages
+
+    key = 'bundle/%s.hg' % uuid.uuid4()
+
+    messages.append('uploading bundle to S3: %s' % key)
+    bundle_bucket.put_object(
+        Key=key,
+        Body=bundle,
+        ContentType='application/octet-stream',
+        ACL='private',
+        StorageClass='STANDARD_IA',
+    )
+
+    bundle_url = 's3://%s/%s' % (bundle_bucket.name, key)
+
+    # We record this as a "push" in DynamoDB. The creation of this entry
+    # will result in CI being scheduled.
+    messages.append('recording push in DynamoDB')
+    push_table.put_item(Item={
+        'repo': 'try',
+        'push_id': '%s-try' % now,
+        # Try pushes are always based on the published repository history.
+        'repo_url': 'https://www.mercurial-scm.org/repo/hg',
+        'bundle_url': bundle_url,
+        'repo_name': 'try',
+        'poll_rev': 'try',
+        'push_date': now,
+        'node': node,
+        'branch': branch,
+        'user': user,
+        'message': message,
+     })
+
+    messages.append('push recorded; jobs should start running automatically')
+    messages.append('see results at %s' % web_url)
+
+    return messages
diff --git a/contrib/ci/README.rst b/contrib/ci/README.rst
--- a/contrib/ci/README.rst
+++ b/contrib/ci/README.rst
@@ -4,6 +4,33 @@
 
 This directory defines a CI system for the Mercurial Project.
 
+Try Server
+==========
+
+The CI system features a *Try Server* which allows you to submit
+arbitrary changesets for evaluation. This works by sending a
+Mercurial bundle to a remote server, which ingests it into the CI
+system and schedules it for execution.
+
+Registering
+-----------
+
+Because the *Try Server* is remote code execution as a service,
+we require users to register to use it and all requests must be
+authenticated.
+
+To register a new account, you will need to add the requested user
+to the ``terraform/iam_users.tf`` file. Create a new resource
+using the ``user_account`` module. Typically, you can simply
+copy an existing entry and modify its name and username.
+
+This change will need to be submitted for review and applied by an
+admin of the CI system.
+
+The procedure for obtaining and managing client credentials to
+access the service is available in the ``contrib/automation/README.rst``
+file.
+
 Architecture
 ============
 
@@ -126,6 +153,20 @@
 
 The Terraform code for this component lives in ``web.tf``.
 
+Try Server
+----------
+
+The *try server* component provides a mechanism to trigger CI on arbitrary
+diffs. It allows Mercurial developers to test ad-hoc changes by uploading
+Mercurial bundles to the CI system.
+
+The *try server* works by exposing a Lambda function to privileged
+users. The Lambda function is invoked with a Mercurial bundle and metadata
+describing the requested jobs. The function then stores the bundle in an
+S3 bucket and registers jobs to run against the uploaded bundle.
+
+The Terraform code for this component lives in ``try_server.tf``.
+
 AWS Account Management
 ======================
 
@@ -204,13 +245,14 @@
 We have no mechanism to retrigger a job. This requires some form of
 authentication to prevent abuse.
 
-We have no mechanism to trigger CI on arbitrary diffs. We would like
-to provide some kind of *try server* where you can submit a diff and
-the system builds it. Again, this requires some form of authentication.
-
 We have no mechanism to choose which jobs to execute. We probably want
 to build this because there is no need to execute all jobs all the time.
 
+The association between jobs, job results, and pushes is a bit wonky.
+Things can get confused if there are multiple pushes for the same
+changeset. We may need to change the primary key of the DynamoDB tables
+to fix this.
+
 Development Workflow
 ====================
 



To: indygreg, #hg-reviewers
Cc: mercurial-devel


More information about the Mercurial-devel mailing list