diff --git a/.buildkite/job-version-bump-phase2.json.py b/.buildkite/job-version-bump-phase2.json.py new file mode 100755 index 000000000..a05f2664a --- /dev/null +++ b/.buildkite/job-version-bump-phase2.json.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python +# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +# or more contributor license agreements. Licensed under the Elastic License +# 2.0 and the following additional limitation. Functionality enabled by the +# files subject to the Elastic License 2.0 may only be used in production when +# invoked by an Elasticsearch process with a license key installed that permits +# use of machine learning features. You may not use this file except in +# compliance with the Elastic License 2.0 and the foregoing additional +# limitation. +# +# Phase 2 of the ml-cpp version-bump pipeline (uploaded by +# dev-tools/version_bump_upload_phase2.sh after validate). Step-level `if` cannot +# use Buildkite meta-data; gating is done in that shell script instead. + +import contextlib +import json +import os + + +WOLFI_IMAGE = "docker.elastic.co/release-eng/wolfi-build-essential-release-eng:latest" + + +def main(): + pipeline_steps = [ + { + "label": "Bump version to ${NEW_VERSION}", + "key": "bump-version", + "depends_on": "schedule-version-bump-follow-up", + "agents": { + "image": WOLFI_IMAGE, + "cpu": "250m", + "memory": "512Mi", + }, + "env": { + "VERSION_BUMP_MERGE_AUTO": os.environ.get("VERSION_BUMP_MERGE_AUTO", "true"), + }, + "command": [ + "dev-tools/bump_version.sh", + ], + }, + { + "label": "Notify :slack: — version bump PR needs approval", + "key": "queue-slack-notify", + "depends_on": "bump-version", + "command": [ + ".buildkite/pipelines/send_slack_version_bump_notification.sh", + ], + "agents": { + # Same image as bump-version: the minimal python image does not ship + # buildkite-agent, so meta-data get / pipeline upload silently skipped Slack. 
+ "image": WOLFI_IMAGE, + "cpu": "250m", + "memory": "512Mi", + }, + }, + { + "label": "Fetch DRA Artifacts", + "key": "fetch-dra-artifacts", + "depends_on": "queue-slack-notify", + "agents": { + "image": WOLFI_IMAGE, + "cpu": "250m", + "memory": "512Mi", + "ephemeralStorage": "1Gi", + }, + "command": [ + "python3", + "dev-tools/wait_version_bump_dra.py", + ], + "timeout_in_minutes": 240, + "retry": { + "automatic": [{"exit_status": "*", "limit": 2}], + "manual": {"permit_on_passed": True}, + }, + }, + ] + + print(json.dumps({"steps": pipeline_steps}, indent=2)) + + +if __name__ == "__main__": + with contextlib.suppress(KeyboardInterrupt): + main() diff --git a/.buildkite/job-version-bump.json.py b/.buildkite/job-version-bump.json.py index 61f763987..1ae79ac6a 100755 --- a/.buildkite/job-version-bump.json.py +++ b/.buildkite/job-version-bump.json.py @@ -8,87 +8,50 @@ # compliance with the Elastic License 2.0 and the foregoing additional # limitation. # -# This script generates JSON for the ml-cpp version bump pipeline. -# It is intended to be triggered by the centralized release-eng pipeline. -# It can be integrated into existing or new workflows and includes a plugin -# that polls artifact URLs until the expected version is available. - +# Phase 1 of the ml-cpp version bump pipeline (dynamic upload from release-eng). +# +# Buildkite step `if` expressions cannot use build meta-data (see +# https://buildkite.com/docs/pipelines/conditionals ). validate_version_bump_params.sh +# sets ml_cpp_version_bump_noop when origin already matches NEW_VERSION; phase 2 +# (Slack, bump, DRA wait) is uploaded only when needed by +# dev-tools/version_bump_upload_phase2.sh. 
import contextlib import json +WOLFI_IMAGE = "docker.elastic.co/release-eng/wolfi-build-essential-release-eng:latest" + + def main(): - pipeline = {} - # TODO: replace the block step with version bump logic pipeline_steps = [ { - "label": "Queue a :slack: notification for the pipeline", + "label": "Validate version bump parameters", + "key": "validate-version-bump", "depends_on": None, - "command": ".buildkite/pipelines/send_version_bump_notification.sh | buildkite-agent pipeline upload", "agents": { - "image": "python", - }, - }, - { - "block": "Ready to fetch for DRA artifacts?", - "prompt": ( - "Unblock when your team is ready to proceed.\n\n" - "Trigger parameters:\n" - "- NEW_VERSION: ${NEW_VERSION}\n" - "- BRANCH: ${BRANCH}\n" - "- WORKFLOW: ${WORKFLOW}\n" - ), - "key": "block-get-dra-artifacts", - "blocked_state": "running", - }, - { - "label": "Fetch DRA Artifacts", - "key": "fetch-dra-artifacts", - "depends_on": "block-get-dra-artifacts", - "agents": { - "image": "docker.elastic.co/release-eng/wolfi-build-essential-release-eng:latest", + "image": WOLFI_IMAGE, "cpu": "250m", "memory": "512Mi", - "ephemeralStorage": "1Gi", }, "command": [ - 'echo "Starting DRA artifacts retrieval..."', + "dev-tools/validate_version_bump_params.sh", ], - "timeout_in_minutes": 240, - "retry": { - "automatic": [ - { - "exit_status": "*", - "limit": 2, - } - ], - "manual": {"permit_on_passed": True}, + }, + { + "label": "Schedule version bump follow-up steps", + "key": "schedule-version-bump-follow-up", + "depends_on": "validate-version-bump", + "agents": { + "image": "python", }, - "plugins": [ - { - "elastic/json-watcher#v1.0.0": { - "url": "https://artifacts-staging.elastic.co/ml-cpp/latest/${BRANCH}.json", - "field": ".version", - "expected_value": "${NEW_VERSION}", - "polling_interval": "30", - } - }, - { - "elastic/json-watcher#v1.0.0": { - "url": "https://storage.googleapis.com/elastic-artifacts-snapshot/ml-cpp/latest/${BRANCH}.json", - "field": ".version", - 
"expected_value": "${NEW_VERSION}-SNAPSHOT", - "polling_interval": "30", - } - }, + "command": [ + "dev-tools/version_bump_upload_phase2.sh", ], }, ] - pipeline["steps"] = pipeline_steps - - print(json.dumps(pipeline, indent=2)) + print(json.dumps({"steps": pipeline_steps}, indent=2)) if __name__ == "__main__": diff --git a/.buildkite/pipelines/format_and_validation.yml.sh b/.buildkite/pipelines/format_and_validation.yml.sh index c6484d9cb..d050c2471 100755 --- a/.buildkite/pipelines/format_and_validation.yml.sh +++ b/.buildkite/pipelines/format_and_validation.yml.sh @@ -18,6 +18,7 @@ steps: notify: - github_commit_status: context: "Validate formatting with clang-format" + - label: "dev-tools pytest" key: "dev_tools_pytest" command: ".buildkite/scripts/steps/dev_tools_pytest.sh" diff --git a/.buildkite/pipelines/send_slack_version_bump_notification.sh b/.buildkite/pipelines/send_slack_version_bump_notification.sh new file mode 100755 index 000000000..9105d3d41 --- /dev/null +++ b/.buildkite/pipelines/send_slack_version_bump_notification.sh @@ -0,0 +1,77 @@ +#!/usr/bin/env bash +# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +# or more contributor license agreements. Licensed under the Elastic License +# 2.0 and the following additional limitation. Functionality enabled by the +# files subject to the Elastic License 2.0 may only be used in production when +# invoked by an Elasticsearch process with a license key installed that permits +# use of machine learning features. You may not use this file except in +# compliance with the Elastic License 2.0 and the foregoing additional +# limitation. +# +# Single Slack notification for the ml-cpp-version-bump pipeline: runs after the +# bump step opens the PR. Reads ml_cpp_version_bump_pr_url from Buildkite meta-data +# (set by dev-tools/bump_version.sh) and posts the PR link so reviewers can approve. 
+# +# Slack notify must live on the step (see Buildkite docs): build-level notify fires only +# on build.finished — after every downstream step including long DRA waits — so the +# message would appear hours late or never if someone checks earlier. +# +# Optional env: +# ML_CPP_VERSION_BUMP_SLACK_CHANNEL — override channel (default #machine-learn-build) + +set -euo pipefail + +CHANNEL="${ML_CPP_VERSION_BUMP_SLACK_CHANNEL:-#machine-learn-build}" + +if [[ "${BUILDKITE:-}" != "true" ]]; then + echo "BUILDKITE is not true — skipping Slack notification (local run)." + exit 0 +fi + +if ! command -v buildkite-agent >/dev/null 2>&1; then + echo "ERROR: buildkite-agent not in PATH; cannot read meta-data or upload Slack notify pipeline." >&2 + echo "Use the same agent image as bump-version (Wolfi), not a minimal python image." >&2 + exit 1 +fi + +pr_url="" +changed="false" +pr_url=$(buildkite-agent meta-data get "ml_cpp_version_bump_pr_url" 2>/dev/null || true) +changed=$(buildkite-agent meta-data get "ml_cpp_version_bump_changed" 2>/dev/null || echo "false") +# Meta-data values must not contain stray whitespace (Breaks truthiness.) +pr_url=$(echo -n "${pr_url}" | tr -d '\r') +changed=$(echo -n "${changed}" | tr -d '\r') + +if [[ -z "${pr_url}" && "${changed}" != "true" ]]; then + echo "No version-bump PR opened (pr_url empty, ml_cpp_version_bump_changed=${changed}); skipping Slack notification." + exit 0 +fi + +if [[ -z "${pr_url}" && "${changed}" == "true" ]]; then + body_line="DRY RUN — no pull request URL (simulated bump)." 
+else + body_line="Pull request (approval required): ${pr_url}" +fi + +( + cat </dev/null || true) + if [[ "$url" =~ github\.com[:/]([^/]+)/([^/.]+)(\.git)?$ ]]; then + echo "${BASH_REMATCH[1]}/${BASH_REMATCH[2]}" + return 0 + fi + echo "ERROR: could not parse owner/repo from git remote url: ${url:-empty}" >&2 + return 1 +} + +topic_branch_name() { + local tb + if [[ -n "${VERSION_BUMP_TOPIC_BRANCH:-}" ]]; then + echo "${VERSION_BUMP_TOPIC_BRANCH}" + return 0 + fi + tb="ci/ml-cpp-version-bump-${BRANCH}-${NEW_VERSION}" + if [[ -n "${BUILDKITE_BUILD_NUMBER:-}" ]]; then + tb="${tb}-bk${BUILDKITE_BUILD_NUMBER}" + fi + echo "$tb" +} + +sed_inplace() { + if sed --version >/dev/null 2>&1; then + sed -i "$@" + else + sed -i '' "$@" + fi +} + +configure_git() { + git config user.name elasticsearchmachine + git config user.email 'infra-root+elasticsearchmachine@elastic.co' +} + +# Record whether this run actually opened a version-bump PR (for Buildkite DRA wait gating). +version_bump_set_buildkite_meta_changed() { + local changed="$1" + if [[ "${BUILDKITE:-}" != "true" ]]; then + return 0 + fi + if ! command -v buildkite-agent >/dev/null 2>&1; then + echo "WARNING: BUILDKITE=true but buildkite-agent not in PATH; skipping meta-data ml_cpp_version_bump_changed=${changed}" >&2 + return 0 + fi + buildkite-agent meta-data set "ml_cpp_version_bump_changed" "$changed" +} + +# PR URL for the Slack step (after bump). Omit calling when there is no URL — Buildkite +# rejects meta-data set with an empty value ("value cannot be empty…"). +version_bump_set_pr_url_meta() { + local url="${1:-}" + if [[ -z "${url}" ]]; then + return 0 + fi + if [[ "${BUILDKITE:-}" != "true" ]]; then + return 0 + fi + if ! 
command -v buildkite-agent >/dev/null 2>&1; then + echo "WARNING: BUILDKITE=true but buildkite-agent not in PATH; skipping meta-data ml_cpp_version_bump_pr_url" >&2 + return 0 + fi + buildkite-agent meta-data set "ml_cpp_version_bump_pr_url" "$url" +} + +bump_version_via_pr() { + local target_branch="$1" + local target_version="$2" + local topic_branch current_version repo_slug pr_url + + # Default: no DRA wait unless we open a PR (or DRY_RUN simulates one). + version_bump_set_buildkite_meta_changed false + + topic_branch=$(topic_branch_name) + + git fetch origin "$target_branch" + + # Topic branch starts at release-branch tip (same tree validation uses). + git checkout -B "$topic_branch" "origin/${target_branch}" + + current_version=$( + grep '^elasticsearchVersion=' "$GRADLE_PROPS" | head -1 | cut -d= -f2 | tr -d '[:space:]' || true + ) + if [[ -z "$current_version" ]]; then + echo "ERROR: could not read elasticsearchVersion from ${GRADLE_PROPS} on origin/${target_branch}" >&2 + exit 1 + fi + + if ! "$PYTHON" "$VALIDATION_PY" validate \ + --current "$current_version" \ + --new "$target_version" \ + --branch "$target_branch" + then + echo "ERROR: version bump does not match branch tip after fetch (current=${current_version}, target=${target_version})." >&2 + echo "Refusing to rewrite elasticsearchVersion — resolve manually if another automation advanced the branch." >&2 + exit 1 + fi + + if [ "$current_version" = "$target_version" ]; then + echo "Version on origin/${target_branch} is already ${target_version} — nothing to do" + return 0 + fi + + echo "Bumping version via PR branch ${topic_branch}: ${current_version} → ${target_version} (base ${target_branch})" + sed_inplace "s/^elasticsearchVersion=.*/elasticsearchVersion=${target_version}/" "$GRADLE_PROPS" + + if ! 
grep -q "^elasticsearchVersion=${target_version}$" "$GRADLE_PROPS"; then + echo "ERROR: version update verification failed on ${topic_branch}" + grep 'elasticsearchVersion' "$GRADLE_PROPS" + exit 1 + fi + + if git diff-index --quiet HEAD --; then + echo "No changes to commit (file unchanged after sed)" + return 0 + fi + + configure_git + git add "$GRADLE_PROPS" + git commit -m "[ML] Bump version to ${target_version}" + + if [ "$DRY_RUN" = "true" ]; then + echo " [DRY RUN] Would push origin ${topic_branch} and open PR into ${target_branch}" + version_bump_set_buildkite_meta_changed true + return 0 + fi + + git push -u origin "$topic_branch" + echo " Pushed topic branch ${topic_branch}" + + repo_slug=$(github_repo_slug) || exit 1 + + local pr_body + pr_body="$(cat </dev/null 2>&1; then + "${SCRIPT_DIR}/ensure_github_cli.sh" +fi + +if ! command -v gh >/dev/null 2>&1; then + echo "ERROR: GitHub CLI (gh) is not available; see dev-tools/ensure_github_cli.sh" >&2 + exit 1 +fi + +REPO="" +BASE="" +HEAD_REF="" +TITLE="" +BODY="" +DO_MERGE="false" +DO_MERGE_AUTO="false" +MERGE_METHOD="${VERSION_BUMP_MERGE_METHOD:-squash}" + +while [[ $# -gt 0 ]]; do + case "$1" in + --repo) + REPO="$2" + shift 2 + ;; + --base) + BASE="$2" + shift 2 + ;; + --head) + HEAD_REF="$2" + shift 2 + ;; + --title) + TITLE="$2" + shift 2 + ;; + --body) + BODY="$2" + shift 2 + ;; + --merge) + DO_MERGE="true" + shift 1 + ;; + --merge-auto) + DO_MERGE_AUTO="true" + shift 1 + ;; + --merge-method) + MERGE_METHOD="$2" + shift 2 + ;; + *) + echo "ERROR: unknown argument: $1" >&2 + exit 1 + ;; + esac +done + +if [[ "$DO_MERGE" == "true" && "$DO_MERGE_AUTO" == "true" ]]; then + echo "ERROR: use only one of --merge or --merge-auto." >&2 + exit 1 +fi + +if [[ -z "$REPO" || -z "$BASE" || -z "$HEAD_REF" || -z "$TITLE" ]]; then + echo "ERROR: --repo, --base, --head, and --title are required." 
>&2 + exit 1 +fi + +case "$MERGE_METHOD" in + merge) MERGE_TYPE=(--merge) ;; + squash) MERGE_TYPE=(--squash) ;; + rebase) MERGE_TYPE=(--rebase) ;; + *) + echo "ERROR: invalid merge method: ${MERGE_METHOD}" >&2 + exit 1 + ;; +esac + +# gh honors GH_TOKEN; validate after CLI args so invalid flag combinations fail without secrets. +if [[ -z "${GH_TOKEN:-}" ]]; then + if [[ -n "${GITHUB_TOKEN:-}" ]]; then + export GH_TOKEN="${GITHUB_TOKEN}" + elif [[ -n "${VAULT_GITHUB_TOKEN:-}" ]]; then + export GH_TOKEN="${VAULT_GITHUB_TOKEN}" + fi +fi + +if [[ -z "${GH_TOKEN:-}" ]]; then + echo "ERROR: Set GITHUB_TOKEN, VAULT_GITHUB_TOKEN, or GH_TOKEN for gh auth." >&2 + exit 1 +fi + +PR_URL=$(gh pr create \ + --repo "$REPO" \ + --base "$BASE" \ + --head "$HEAD_REF" \ + --title "$TITLE" \ + --body "$BODY") + +echo "$PR_URL" + +if [[ "$DO_MERGE" == "true" || "$DO_MERGE_AUTO" == "true" ]]; then + # Older packaged gh (e.g. Wolfi apk) does not support --yes on pr merge; rely on + # non-TTY / GH_PROMPT_DISABLED for unattended merges. + declare -a merge_admin=() + if [[ "${VERSION_BUMP_MERGE_ADMIN:-}" == "true" ]]; then + merge_admin+=(--admin) + fi + if [[ "$DO_MERGE_AUTO" == "true" ]]; then + GH_PROMPT_DISABLED=1 gh pr merge "$PR_URL" --auto "${MERGE_TYPE[@]}" "${merge_admin[@]}" + echo "Enabled auto-merge: ${PR_URL}" >&2 + else + GH_PROMPT_DISABLED=1 gh pr merge "$PR_URL" "${MERGE_TYPE[@]}" "${merge_admin[@]}" + echo "Merged: ${PR_URL}" >&2 + fi +fi diff --git a/dev-tools/ensure_github_cli.sh b/dev-tools/ensure_github_cli.sh new file mode 100755 index 000000000..df0cbae2c --- /dev/null +++ b/dev-tools/ensure_github_cli.sh @@ -0,0 +1,87 @@ +#!/bin/bash +# +# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +# or more contributor license agreements. Licensed under the Elastic License +# 2.0 and the following additional limitation. 
Functionality enabled by the +# files subject to the Elastic License 2.0 may only be used in production when +# invoked by an Elasticsearch process with a license key installed that permits +# use of machine learning features. You may not use this file except in +# compliance with the Elastic License 2.0 and the foregoing additional +# limitation. +# +# Ensure the GitHub CLI (gh) is available on PATH. Used by automated PR flows +# (e.g. dev-tools/create_github_pull_request.sh) when the image does not +# pre-install gh (Wolfi: try apk; Linux: fall back to GitHub release tarball). +# +# Environment: +# SKIP_GH_AUTO_INSTALL — set to true to skip and exit non-zero if gh is missing +# GH_CLI_VERSION — pinned release for tarball fallback (default below) + +set -euo pipefail + +if command -v gh >/dev/null 2>&1; then + exit 0 +fi + +if [[ "${SKIP_GH_AUTO_INSTALL:-}" == "true" ]]; then + echo "ERROR: gh not found and SKIP_GH_AUTO_INSTALL=true" >&2 + exit 1 +fi + +echo "Installing GitHub CLI (gh)..." >&2 + +# Wolfi / Alpine-style images (ml-cpp version-bump uses release-eng Wolfi) +if command -v apk >/dev/null 2>&1; then + if apk add --no-cache gh 2>/dev/null || apk add --no-cache github-cli 2>/dev/null; then + command -v gh >/dev/null 2>&1 && exit 0 + fi +fi + +OS=$(uname -s) +ARCH=$(uname -m) +if [[ "$OS" != Linux ]]; then + echo "ERROR: gh not installed; on ${OS} install from https://cli.github.com/ (e.g. brew install gh)." >&2 + exit 1 +fi + +case "$ARCH" in + x86_64) GH_ARCH=amd64 ;; + aarch64 | arm64) GH_ARCH=arm64 ;; + *) + echo "ERROR: unsupported Linux machine type for gh tarball: ${ARCH}" >&2 + exit 1 + ;; +esac + +GH_CLI_VERSION="${GH_CLI_VERSION:-2.63.2}" +PREFIX="${GH_CLI_INSTALL_PREFIX:-/usr/local}" +BIN_DIR="${PREFIX}/bin" +if ! mkdir -p "$BIN_DIR" 2>/dev/null || [[ ! -w "$BIN_DIR" ]]; then + echo "ERROR: cannot write gh to ${BIN_DIR}; install gh manually or run as a user that can write there." 
>&2 + exit 1 +fi + +TMP=$(mktemp -d) +trap 'rm -rf "${TMP}"' EXIT + +URL="https://github.com/cli/cli/releases/download/v${GH_CLI_VERSION}/gh_${GH_CLI_VERSION}_linux_${GH_ARCH}.tar.gz" +if ! curl -fsSL "$URL" -o "${TMP}/gh.tgz"; then + echo "ERROR: failed to download gh ${GH_CLI_VERSION} from GitHub releases (set GH_CLI_VERSION?)." >&2 + exit 1 +fi + +tar -xzf "${TMP}/gh.tgz" -C "${TMP}" +GH_BIN=$(find "${TMP}" -path '*/bin/gh' -type f | head -1) +if [[ -z "${GH_BIN}" ]]; then + echo "ERROR: gh binary not found in release archive." >&2 + exit 1 +fi + +install -m 0755 "${GH_BIN}" "${BIN_DIR}/gh" +hash -r 2>/dev/null || true +echo "Installed gh to ${BIN_DIR}/gh" >&2 + +if ! command -v gh >/dev/null 2>&1; then + echo "ERROR: gh still not on PATH after install (ensure ${BIN_DIR} is on PATH)." >&2 + exit 1 +fi diff --git a/dev-tools/unittest/test_job_version_bump_pipeline.py b/dev-tools/unittest/test_job_version_bump_pipeline.py new file mode 100644 index 000000000..1e077948e --- /dev/null +++ b/dev-tools/unittest/test_job_version_bump_pipeline.py @@ -0,0 +1,147 @@ +#!/usr/bin/env python3 +# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +# or more contributor license agreements. Licensed under the Elastic License +# 2.0 and the following additional limitation. Functionality enabled by the +# files subject to the Elastic License 2.0 may only be used in production when +# invoked by an Elasticsearch process with a license key installed that permits +# use of machine learning features. You may not use this file except in +# compliance with the Elastic License 2.0 and the foregoing additional +# limitation. 
+ +"""Tests for .buildkite/job-version-bump*.json.py pipeline generators.""" + +from __future__ import annotations + +import json +import os +import subprocess +import sys +from pathlib import Path + +_REPO_ROOT = Path(__file__).resolve().parents[2] +_PIPELINE_PHASE1 = _REPO_ROOT / ".buildkite" / "job-version-bump.json.py" +_PIPELINE_PHASE2 = _REPO_ROOT / ".buildkite" / "job-version-bump-phase2.json.py" + + +def _run_phase1(extra_env: dict[str, str] | None = None) -> dict: + env = os.environ.copy() + env.pop("VERSION_BUMP_MERGE_AUTO", None) + if extra_env: + env.update(extra_env) + out = subprocess.check_output( + [sys.executable, str(_PIPELINE_PHASE1)], + cwd=str(_REPO_ROOT), + env=env, + text=True, + ) + return json.loads(out) + + +def _run_phase2(extra_env: dict[str, str] | None = None) -> dict: + env = os.environ.copy() + env.pop("VERSION_BUMP_MERGE_AUTO", None) + if extra_env: + env.update(extra_env) + out = subprocess.check_output( + [sys.executable, str(_PIPELINE_PHASE2)], + cwd=str(_REPO_ROOT), + env=env, + text=True, + ) + return json.loads(out) + + +def _step_by_key(pipeline: dict, key: str) -> dict: + return next(s for s in pipeline["steps"] if s.get("key") == key) + + +def test_phase1_has_validate_and_schedule_only() -> None: + pipeline = _run_phase1() + keys = [s.get("key") for s in pipeline["steps"]] + assert keys == ["validate-version-bump", "schedule-version-bump-follow-up"] + + +def test_phase1_has_no_step_if_using_meta_data() -> None: + """Buildkite rejects build.meta_data in step if expressions at pipeline upload.""" + pipeline = _run_phase1() + for step in pipeline["steps"]: + cond = step.get("if") + if cond is None: + continue + assert "build.meta_data" not in cond + + +def test_phase1_schedule_depends_on_validate() -> None: + pipeline = _run_phase1() + sched = _step_by_key(pipeline, "schedule-version-bump-follow-up") + assert sched["depends_on"] == "validate-version-bump" + assert sched["command"] == ["dev-tools/version_bump_upload_phase2.sh"] 
+ + +def test_phase2_bump_defaults_merge_auto_true() -> None: + pipeline = _run_phase2() + bump = _step_by_key(pipeline, "bump-version") + assert bump["env"]["VERSION_BUMP_MERGE_AUTO"] == "true" + + +def test_phase2_bump_respects_merge_auto_override_false() -> None: + pipeline = _run_phase2({"VERSION_BUMP_MERGE_AUTO": "false"}) + bump = _step_by_key(pipeline, "bump-version") + assert bump["env"]["VERSION_BUMP_MERGE_AUTO"] == "false" + + +def test_phase2_dra_uses_wait_script_not_meta_in_if() -> None: + pipeline = _run_phase2() + dra = _step_by_key(pipeline, "fetch-dra-artifacts") + assert "if" not in dra + assert "plugins" not in dra + assert dra["command"] == ["python3", "dev-tools/wait_version_bump_dra.py"] + + +def test_phase2_order_bump_then_slack_then_dra() -> None: + pipeline = _run_phase2() + assert ( + _step_by_key(pipeline, "bump-version")["depends_on"] + == "schedule-version-bump-follow-up" + ) + assert _step_by_key(pipeline, "queue-slack-notify")["depends_on"] == "bump-version" + slack_cmd = _step_by_key(pipeline, "queue-slack-notify")["command"] + assert slack_cmd == [".buildkite/pipelines/send_slack_version_bump_notification.sh"] + assert ( + _step_by_key(pipeline, "fetch-dra-artifacts")["depends_on"] + == "queue-slack-notify" + ) + + +def test_phase2_slack_step_uses_same_agent_image_as_bump() -> None: + """Slack step must run where buildkite-agent is available (see send_slack script).""" + pipeline = _run_phase2() + bump_img = _step_by_key(pipeline, "bump-version")["agents"]["image"] + slack_img = _step_by_key(pipeline, "queue-slack-notify")["agents"]["image"] + assert slack_img == bump_img + + +def test_mutually_exclusive_merge_flags_script() -> None: + """create_github_pull_request.sh rejects --merge and --merge-auto together.""" + script = _REPO_ROOT / "dev-tools" / "create_github_pull_request.sh" + proc = subprocess.run( + [ + "bash", + str(script), + "--repo", + "r/r", + "--base", + "b", + "--head", + "h", + "--title", + "t", + "--merge", + 
"--merge-auto", + ], + cwd=str(_REPO_ROOT), + capture_output=True, + text=True, + ) + assert proc.returncode != 0 + assert "only one of --merge or --merge-auto" in proc.stderr diff --git a/dev-tools/unittest/test_version_bump_validation.py b/dev-tools/unittest/test_version_bump_validation.py new file mode 100644 index 000000000..bb20fdc18 --- /dev/null +++ b/dev-tools/unittest/test_version_bump_validation.py @@ -0,0 +1,303 @@ +#!/usr/bin/env python3 +# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +# or more contributor license agreements. Licensed under the Elastic License +# 2.0 and the following additional limitation. Functionality enabled by the +# files subject to the Elastic License 2.0 may only be used in production when +# invoked by an Elasticsearch process with a license key installed that permits +# use of machine learning features. You may not use this file except in +# compliance with the Elastic License 2.0 and the foregoing additional +# limitation. + +"""Pytest tests for dev-tools/version_bump_validation.py (Buildkite bump rules). + +Integration tests (real ``git fetch`` + ``validate_version_bump_params.sh``) are +opt-in so CI stays deterministic: + + export VERSION_BUMP_GIT_INTEGRATION=1 + export VERSION_BUMP_TEST_BRANCH=9.5 # MAJOR.MINOR branch that exists on origin + python3 -m pip install -r dev-tools/test-requirements.txt + ./dev-tools/run_dev_tools_tests.sh + +Optional: ``VERSION_BUMP_SKIP_NEGATIVE_INTEGRATION=1`` to skip the negative +``patch+2`` check only. 
+""" + +from __future__ import annotations + +import os +import subprocess +import sys +from pathlib import Path + +import pytest + +_DEV_TOOLS = Path(__file__).resolve().parents[1] +if str(_DEV_TOOLS) not in sys.path: + sys.path.insert(0, str(_DEV_TOOLS)) + +import version_bump_validation as vbu # noqa: E402 + +_REPO_ROOT = _DEV_TOOLS.parent +_VALIDATOR_SCRIPT = _DEV_TOOLS / "validate_version_bump_params.sh" +_MODULE = _DEV_TOOLS / "version_bump_validation.py" + + +def test_parse_semver_ok() -> None: + assert vbu.parse_semver("9.5.1") == (9, 5, 1) + + +def test_parse_semver_rejects() -> None: + assert vbu.parse_semver("9.5") is None + assert vbu.parse_semver("v9.5.0") is None + assert vbu.parse_semver("9.5.0.1") is None + + +def test_patch_ok_consecutive() -> None: + vbu.validate_version_bump_params( + current_version="9.5.0", + new_version="9.5.1", + branch="9.5", + ) + + +def test_patch_ok_noop_same_version() -> None: + vbu.validate_version_bump_params( + current_version="9.5.1", + new_version="9.5.1", + branch="9.5", + ) + + +def test_patch_rejects_skip() -> None: + with pytest.raises(ValueError): + vbu.validate_version_bump_params( + current_version="9.5.0", + new_version="9.5.2", + branch="9.5", + ) + + +def test_patch_rejects_wrong_release_branch() -> None: + with pytest.raises(ValueError): + vbu.validate_version_bump_params( + current_version="9.5.0", + new_version="9.5.1", + branch="9.4", + ) + + +def test_patch_rejects_major_minor_mismatch() -> None: + with pytest.raises(ValueError): + vbu.validate_version_bump_params( + current_version="9.4.9", + new_version="9.5.1", + branch="9.5", + ) + + +def test_cli_validate_patch_ok() -> None: + rc = subprocess.call( + [ + sys.executable, + str(_MODULE), + "validate", + "--current", + "9.5.0", + "--new", + "9.5.1", + "--branch", + "9.5", + ], + cwd=str(_REPO_ROOT), + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + assert rc == 0 + + +def test_cli_validate_patch_negative() -> None: + rc = 
subprocess.call( + [ + sys.executable, + str(_MODULE), + "validate", + "--current", + "9.5.0", + "--new", + "9.5.2", + "--branch", + "9.5", + ], + cwd=str(_REPO_ROOT), + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + assert rc != 0 + + +@pytest.mark.skipif( + not _VALIDATOR_SCRIPT.is_file(), + reason="validate_version_bump_params.sh missing", +) +def test_shell_skip_validation_env() -> None: + env = os.environ.copy() + env["SKIP_VERSION_VALIDATION"] = "true" + env.pop("NEW_VERSION", None) + env.pop("BRANCH", None) + out = subprocess.run( + ["/bin/bash", str(_VALIDATOR_SCRIPT)], + cwd=str(_REPO_ROOT), + env=env, + capture_output=True, + text=True, + timeout=5, + ) + assert out.returncode == 0, out.stderr + out.stdout + + +@pytest.mark.skipif( + not _VALIDATOR_SCRIPT.is_file(), + reason="validate_version_bump_params.sh missing", +) +def test_shell_rejects_non_patch_workflow() -> None: + """Upstream may send WORKFLOW=minor; fail before git fetch.""" + env = os.environ.copy() + env["WORKFLOW"] = "minor" + env["NEW_VERSION"] = "9.5.1" + env["BRANCH"] = "9.5" + env.pop("SKIP_VERSION_VALIDATION", None) + out = subprocess.run( + ["/bin/bash", str(_VALIDATOR_SCRIPT)], + cwd=str(_REPO_ROOT), + env=env, + capture_output=True, + text=True, + timeout=5, + ) + assert out.returncode != 0, out.stderr + out.stdout + assert "WORKFLOW" in out.stderr or "WORKFLOW" in out.stdout + + +def _integration_requested() -> bool: + return os.environ.get("VERSION_BUMP_GIT_INTEGRATION") == "1" + + +def _integration_branch() -> str | None: + b = os.environ.get("VERSION_BUMP_TEST_BRANCH", "").strip() + return b or None + + +def _read_version_from_fetch_head(repo: Path) -> str: + proc = subprocess.run( + ["git", "show", "FETCH_HEAD:gradle.properties"], + cwd=str(repo), + capture_output=True, + text=True, + timeout=60, + ) + if proc.returncode != 0: + raise AssertionError( + f"git show FETCH_HEAD:gradle.properties failed: {proc.stderr}" + ) + for line in proc.stdout.splitlines(): + if 
line.startswith("elasticsearchVersion="): + return line.split("=", 1)[1].strip() + raise AssertionError("elasticsearchVersion not found in FETCH_HEAD gradle.properties") + + +@pytest.fixture +def git_patch_integration_branch() -> str: + """Release branch MAJOR.MINOR; requires network + origin ref.""" + if not _integration_requested(): + pytest.skip( + "Set VERSION_BUMP_GIT_INTEGRATION=1 and VERSION_BUMP_TEST_BRANCH " + "(e.g. 9.5) to run git integration tests." + ) + br = _integration_branch() + if not br: + pytest.skip("VERSION_BUMP_TEST_BRANCH is not set.") + return br + + +@pytest.mark.integration +@pytest.mark.skipif( + not _VALIDATOR_SCRIPT.is_file(), + reason="validate_version_bump_params.sh missing", +) +def test_integration_patch_validate_script_with_git_fetch(git_patch_integration_branch: str) -> None: + """Run validate_version_bump_params.sh after fetch; NEW_VERSION = patch+1 from origin.""" + branch = git_patch_integration_branch + fetch = subprocess.run( + ["git", "fetch", "origin", branch], + cwd=str(_REPO_ROOT), + capture_output=True, + text=True, + timeout=120, + ) + assert fetch.returncode == 0, fetch.stderr + fetch.stdout + + cur = _read_version_from_fetch_head(_REPO_ROOT) + triple = vbu.parse_semver(cur) + assert triple is not None, f"unexpected elasticsearchVersion on branch: {cur!r}" + maj, mino, pat = triple + new_version = f"{maj}.{mino}.{pat + 1}" + + env = os.environ.copy() + env["NEW_VERSION"] = new_version + env["BRANCH"] = branch + env["WORKFLOW"] = "patch" + env.pop("SKIP_VERSION_VALIDATION", None) + + out = subprocess.run( + ["/bin/bash", str(_VALIDATOR_SCRIPT)], + cwd=str(_REPO_ROOT), + env=env, + capture_output=True, + text=True, + timeout=180, + ) + assert out.returncode == 0, out.stderr + out.stdout + + +@pytest.mark.integration +@pytest.mark.skipif( + not _VALIDATOR_SCRIPT.is_file(), + reason="validate_version_bump_params.sh missing", +) +@pytest.mark.skipif( + os.environ.get("VERSION_BUMP_SKIP_NEGATIVE_INTEGRATION") == "1", + 
reason="VERSION_BUMP_SKIP_NEGATIVE_INTEGRATION=1", +) +def test_integration_patch_validate_script_rejects_bad_jump(git_patch_integration_branch: str) -> None: + """Same fetch as production path; NEW_VERSION = patch+2 must fail validation.""" + branch = git_patch_integration_branch + fetch = subprocess.run( + ["git", "fetch", "origin", branch], + cwd=str(_REPO_ROOT), + capture_output=True, + text=True, + timeout=120, + ) + assert fetch.returncode == 0, fetch.stderr + fetch.stdout + + cur = _read_version_from_fetch_head(_REPO_ROOT) + triple = vbu.parse_semver(cur) + assert triple is not None + maj, mino, pat = triple + bad_version = f"{maj}.{mino}.{pat + 2}" + + env = os.environ.copy() + env["NEW_VERSION"] = bad_version + env["BRANCH"] = branch + env["WORKFLOW"] = "patch" + env.pop("SKIP_VERSION_VALIDATION", None) + + out = subprocess.run( + ["/bin/bash", str(_VALIDATOR_SCRIPT)], + cwd=str(_REPO_ROOT), + env=env, + capture_output=True, + text=True, + timeout=180, + ) + assert out.returncode != 0, "validator should reject non-consecutive patch bump" diff --git a/dev-tools/validate_version_bump_params.sh b/dev-tools/validate_version_bump_params.sh new file mode 100755 index 000000000..4830cd487 --- /dev/null +++ b/dev-tools/validate_version_bump_params.sh @@ -0,0 +1,107 @@ +#!/bin/bash +# +# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +# or more contributor license agreements. Licensed under the Elastic License +# 2.0 and the following additional limitation. Functionality enabled by the +# files subject to the Elastic License 2.0 may only be used in production when +# invoked by an Elasticsearch process with a license key installed that permits +# use of machine learning features. You may not use this file except in +# compliance with the Elastic License 2.0 and the foregoing additional +# limitation. 
+#
+# Validates NEW_VERSION / BRANCH against elasticsearchVersion on the
+# remote release branch before ml-cpp-version-bump runs bump_version.sh.
+# Semantic rules live in version_bump_validation.py (unit-tested).
+#
+# Environment:
+#   NEW_VERSION — required target stack version (MAJOR.MINOR.PATCH), unless skipped
+#   BRANCH — required release branch (e.g. 9.5), unless skipped
+#   WORKFLOW — optional; defaults to patch. If set by upstream automation, must be
+#       exactly "patch" (this pipeline does not support minor bumps).
+#   SKIP_VERSION_VALIDATION — set to "true" to skip (emergency override only)
+#   PYTHON — interpreter (default: python3)
+#
+# Buildkite (BUILDKITE=true): sets meta-data ml_cpp_version_bump_noop to true when
+# origin/BRANCH already has NEW_VERSION, so downstream Slack/bump steps are skipped.
+
+set -euo pipefail
+
+# Record the no-op verdict in Buildkite build meta-data. Best-effort: outside
+# Buildkite, or when the agent binary is missing, this warns and returns 0.
+version_bump_set_noop_meta() {
+    local noop="$1"
+    if [[ "${BUILDKITE:-}" != "true" ]]; then
+        return 0
+    fi
+    if ! command -v buildkite-agent >/dev/null 2>&1; then
+        echo "WARNING: BUILDKITE=true but buildkite-agent not in PATH; skipping meta-data ml_cpp_version_bump_noop=${noop}" >&2
+        return 0
+    fi
+    buildkite-agent meta-data set "ml_cpp_version_bump_noop" "$noop"
+}
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+PYTHON="${PYTHON:-python3}"
+VALIDATION_PY="${SCRIPT_DIR}/version_bump_validation.py"
+
+SKIP_VERSION_VALIDATION="${SKIP_VERSION_VALIDATION:-false}"
+
+if [[ "$SKIP_VERSION_VALIDATION" == "true" ]]; then
+    echo "WARNING: SKIP_VERSION_VALIDATION=true — version increment checks skipped." >&2
+    # Skipped validation still records noop=false so phase 2 is scheduled.
+    version_bump_set_noop_meta false
+    exit 0
+fi
+
+: "${NEW_VERSION:?NEW_VERSION must be set}"
+: "${BRANCH:?BRANCH must be set}"
+
+WORKFLOW="${WORKFLOW:-patch}"
+if [[ "$WORKFLOW" != "patch" ]]; then
+    echo "ERROR: WORKFLOW must be \"patch\" for this pipeline, got \"${WORKFLOW}\"" >&2
+    exit 1
+fi
+
+echo "=== Version bump validation (patch) ==="
+echo "WORKFLOW: ${WORKFLOW}"
+echo "NEW_VERSION: ${NEW_VERSION}"
+echo "BRANCH: ${BRANCH}"
+
+# Patch-only pipeline (no WORKFLOW=minor): consecutive patch on this release
+# branch. Current version is read from origin/${BRANCH} by design — there is no
+# minor-line bump mode in dev-tools/version_bump_validation.py or this pipeline.
+
+echo "Fetching origin/${BRANCH}..."
+git fetch origin "$BRANCH"
+
+if ! git cat-file -e FETCH_HEAD:gradle.properties 2>/dev/null; then
+    echo "ERROR: gradle.properties missing at FETCH_HEAD (origin/${BRANCH})" >&2
+    exit 1
+fi
+
+# Allow empty result: with pipefail, grep exits 1 when there is no match, which
+# would abort the substitution before the explicit empty check below.
+CURRENT_VERSION=$(
+    git show FETCH_HEAD:gradle.properties | grep '^elasticsearchVersion=' | head -1 | cut -d= -f2 | tr -d '[:space:]' || true
+)
+
+if [[ -z "$CURRENT_VERSION" ]]; then
+    echo "ERROR: could not read elasticsearchVersion from origin/${BRANCH} gradle.properties" >&2
+    exit 1
+fi
+
+echo "Current version on origin/${BRANCH}: ${CURRENT_VERSION}"
+
+if ! "$PYTHON" "$VALIDATION_PY" validate-and-report \
+    --current "$CURRENT_VERSION" \
+    --new "$NEW_VERSION" \
+    --branch "$BRANCH"
+then
+    exit 1
+fi
+
+# Match Python strip() semantics for equality (no-op → skip later pipeline steps).
+cur_trim=$(echo "$CURRENT_VERSION" | tr -d '[:space:]')
+new_trim=$(echo "$NEW_VERSION" | tr -d '[:space:]')
+if [[ "$cur_trim" == "$new_trim" ]]; then
+    version_bump_set_noop_meta true
+else
+    version_bump_set_noop_meta false
+fi
diff --git a/dev-tools/version_bump_upload_phase2.sh b/dev-tools/version_bump_upload_phase2.sh
new file mode 100755
index 000000000..82be7561c
--- /dev/null
+++ b/dev-tools/version_bump_upload_phase2.sh
@@ -0,0 +1,45 @@
+#!/bin/bash
+#
+# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+# or more contributor license agreements. Licensed under the Elastic License
+# 2.0 and the following additional limitation. Functionality enabled by the
+# files subject to the Elastic License 2.0 may only be used in production when
+# invoked by an Elasticsearch process with a license key installed that permits
+# use of machine learning features. You may not use this file except in
+# compliance with the Elastic License 2.0 and the foregoing additional
+# limitation.
+#
+# Second phase of the ml-cpp version-bump pipeline (after validate). Buildkite step
+# `if` cannot read build meta-data, so we gate follow-up steps by reading
+# ml_cpp_version_bump_noop here and uploading phase-2 YAML only when a bump is needed.
+
+set -euo pipefail
+
+# Run from the repo root so the relative .buildkite path below resolves.
+if [[ -n "${BUILDKITE_BUILD_CHECKOUT_PATH:-}" ]]; then
+    cd "${BUILDKITE_BUILD_CHECKOUT_PATH}"
+else
+    ROOT="$(git rev-parse --show-toplevel 2>/dev/null || true)"
+    if [[ -z "${ROOT}" ]]; then
+        echo "ERROR: set BUILDKITE_BUILD_CHECKOUT_PATH or run from a git checkout" >&2
+        exit 1
+    fi
+    cd "${ROOT}"
+fi
+
+if [[ "${DRY_RUN:-}" == "true" ]]; then
+    echo "DRY_RUN=true — not scheduling version-bump follow-up steps."
+    exit 0
+fi
+
+if ! command -v buildkite-agent >/dev/null 2>&1; then
+    echo "ERROR: buildkite-agent not found; cannot upload phase-2 pipeline." >&2
+    exit 1
+fi
+
+# Unset/unreadable meta-data defaults to "false" so a fresh build still uploads phase 2.
+noop=$(buildkite-agent meta-data get "ml_cpp_version_bump_noop" 2>/dev/null || echo "false")
+if [[ "${noop}" == "true" ]]; then
+    echo "ml_cpp_version_bump_noop=true — branch already at NEW_VERSION; skipping follow-up steps."
+    exit 0
+fi
+
+# NOTE(review): exec replaces only the first pipeline segment's subshell; with
+# pipefail a python3 failure still fails the overall command.
+exec python3 .buildkite/job-version-bump-phase2.json.py | buildkite-agent pipeline upload
diff --git a/dev-tools/version_bump_validation.py b/dev-tools/version_bump_validation.py
new file mode 100644
index 000000000..8d19300ed
--- /dev/null
+++ b/dev-tools/version_bump_validation.py
@@ -0,0 +1,161 @@
+#!/usr/bin/env python3
+# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+# or more contributor license agreements. Licensed under the Elastic License
+# 2.0 and the following additional limitation. Functionality enabled by the
+# files subject to the Elastic License 2.0 may only be used in production when
+# invoked by an Elasticsearch process with a license key installed that permits
+# use of machine learning features. You may not use this file except in
+# compliance with the Elastic License 2.0 and the foregoing additional
+# limitation.
+#
+"""Rules for ml-cpp patch release version bump parameters (Buildkite / release-eng).
+
+Used by dev-tools/validate_version_bump_params.sh and unit-tested under
+dev-tools/unittest/.
+
+Run tests from repo root (install dev-tools test deps first, see
+``dev-tools/run_dev_tools_tests.sh``):
+
+    python3 -m pip install -r dev-tools/test-requirements.txt
+    ./dev-tools/run_dev_tools_tests.sh
+
+Optional git integration (real ``git fetch`` + shell validator): set
+``VERSION_BUMP_GIT_INTEGRATION=1`` and ``VERSION_BUMP_TEST_BRANCH=MAJOR.MINOR``.
+See ``unittest/test_version_bump_validation.py`` module docstring.
+""" + +from __future__ import annotations + +import argparse +import re +import sys +from typing import Optional, Tuple + +SEMVER_RE = re.compile(r"^([0-9]+)\.([0-9]+)\.([0-9]+)$") +BRANCH_RE = re.compile(r"^([0-9]+)\.([0-9]+)$") + + +def parse_semver(version: str) -> Optional[Tuple[int, int, int]]: + m = SEMVER_RE.match(version.strip()) + if not m: + return None + return (int(m.group(1)), int(m.group(2)), int(m.group(3))) + + +def parse_release_branch(branch: str) -> Optional[Tuple[int, int]]: + m = BRANCH_RE.match(branch.strip()) + if not m: + return None + return (int(m.group(1)), int(m.group(2))) + + +def validate_version_bump_params( + *, + current_version: str, + new_version: str, + branch: str, +) -> None: + """Validate patch bump parameters. Raises ValueError on failure. + + When current_version == new_version, the bump is a no-op and always valid. + """ + new_t = parse_semver(new_version) + if new_t is None: + raise ValueError( + f"NEW_VERSION must be MAJOR.MINOR.PATCH (digits only), got {new_version!r}" + ) + new_major, new_minor, new_patch = new_t + + br = parse_release_branch(branch) + if br is None: + raise ValueError( + f"BRANCH must be MAJOR.MINOR (e.g. 
9.5), got {branch!r}" + ) + br_major, br_minor = br + if br_major != new_major or br_minor != new_minor: + raise ValueError( + f"BRANCH {branch!r} must match MAJOR.MINOR of NEW_VERSION " + f"({new_major}.{new_minor}), got NEW_VERSION {new_version!r}" + ) + + cur_t = parse_semver(current_version) + if cur_t is None: + raise ValueError( + "elasticsearchVersion on branch must be MAJOR.MINOR.PATCH, " + f"got {current_version!r}" + ) + cur_major, cur_minor, cur_patch = cur_t + + if current_version.strip() == new_version.strip(): + return + + if cur_major != new_major or cur_minor != new_minor: + raise ValueError( + "patch bump requires same MAJOR.MINOR as current " + f"({cur_major}.{cur_minor} vs {new_major}.{new_minor})" + ) + expected_patch = cur_patch + 1 + if new_patch != expected_patch: + raise ValueError( + "patch bump expects NEW_VERSION patch = current patch + 1 " + f"({current_version} → {new_major}.{new_minor}.{expected_patch}), " + f"got {new_version}" + ) + + +def _cmd_validate(args: argparse.Namespace) -> int: + try: + validate_version_bump_params( + current_version=args.current, + new_version=args.new, + branch=args.branch, + ) + except ValueError as e: + print(f"ERROR: {e}", file=sys.stderr) + return 1 + return 0 + + +def _cmd_validate_and_report(args: argparse.Namespace) -> int: + rc = _cmd_validate(args) + if rc != 0: + return rc + cur = args.current.strip() + new = args.new.strip() + if cur == new: + print(f"OK: branch already at {new} — bump step will no-op.") + else: + print(f"OK: patch increment {cur} → {new}") + return 0 + + +def main() -> int: + parser = argparse.ArgumentParser( + description="ml-cpp patch version bump parameter validation" + ) + sub = parser.add_subparsers(dest="command", required=True) + + p_val = sub.add_parser( + "validate", + help="check current/new/branch (same rules as Buildkite)", + ) + p_val.add_argument("--current", required=True, help="elasticsearchVersion on branch") + p_val.add_argument("--new", required=True, 
dest="new", help="NEW_VERSION") + p_val.add_argument("--branch", required=True, help="BRANCH (MAJOR.MINOR)") + p_val.set_defaults(func=_cmd_validate) + + p_rep = sub.add_parser( + "validate-and-report", + help="validate and print the same OK lines as validate_version_bump_params.sh", + ) + p_rep.add_argument("--current", required=True) + p_rep.add_argument("--new", required=True, dest="new") + p_rep.add_argument("--branch", required=True) + p_rep.set_defaults(func=_cmd_validate_and_report) + + args = parser.parse_args() + return args.func(args) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/dev-tools/wait_version_bump_dra.py b/dev-tools/wait_version_bump_dra.py new file mode 100755 index 000000000..d611680ed --- /dev/null +++ b/dev-tools/wait_version_bump_dra.py @@ -0,0 +1,108 @@ +#!/usr/bin/env python3 +# Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +# or more contributor license agreements. Licensed under the Elastic License +# 2.0 and the following additional limitation. Functionality enabled by the +# files subject to the Elastic License 2.0 may only be used in production when +# invoked by an Elasticsearch process with a license key installed that permits +# use of machine learning features. You may not use this file except in +# compliance with the Elastic License 2.0 and the foregoing additional +# limitation. + +"""Poll DRA staging/snapshot JSON until versions match (replaces json-watcher plugin). + +Buildkite step conditionals cannot use build meta-data; this script reads +ml_cpp_version_bump_changed via ``buildkite-agent meta-data get`` and exits +immediately when no PR was opened. 
+""" + +from __future__ import annotations + +import json +import os +import subprocess +import sys +import time +import urllib.error +import urllib.request + +POLL_SECONDS = 30 +TIMEOUT_SECONDS = 240 * 60 + +STAGING_TMPL = "https://artifacts-staging.elastic.co/ml-cpp/latest/{branch}.json" +SNAPSHOT_TMPL = "https://storage.googleapis.com/elastic-artifacts-snapshot/ml-cpp/latest/{branch}.json" + + +def _meta_get(key: str) -> str | None: + if os.environ.get("BUILDKITE") != "true": + return None + try: + proc = subprocess.run( + ["buildkite-agent", "meta-data", "get", key], + capture_output=True, + text=True, + check=True, + timeout=60, + ) + v = proc.stdout.strip() + return v if v else None + except (subprocess.CalledProcessError, FileNotFoundError, subprocess.TimeoutExpired): + return None + + +def _fetch_version(url: str) -> str | None: + try: + req = urllib.request.Request(url, headers={"User-Agent": "ml-cpp-version-bump-dra-wait"}) + with urllib.request.urlopen(req, timeout=60) as resp: + data = json.loads(resp.read().decode("utf-8")) + ver = data.get("version") + if ver is None: + return None + return str(ver).strip() + except (urllib.error.URLError, json.JSONDecodeError, UnicodeDecodeError, ValueError): + return None + + +def main() -> int: + if os.environ.get("DRY_RUN") == "true": + print("DRY_RUN=true — skipping DRA wait.") + return 0 + + if _meta_get("ml_cpp_version_bump_changed") != "true": + print( + "ml_cpp_version_bump_changed is not true — no PR opened; skipping DRA wait.", + file=sys.stderr, + ) + return 0 + + branch = os.environ.get("BRANCH", "").strip() + new_version = os.environ.get("NEW_VERSION", "").strip() + if not branch or not new_version: + print("ERROR: BRANCH and NEW_VERSION must be set.", file=sys.stderr) + return 1 + + staging_url = STAGING_TMPL.format(branch=branch) + snapshot_url = SNAPSHOT_TMPL.format(branch=branch) + want_staging = new_version + want_snapshot = f"{new_version}-SNAPSHOT" + + print(f"Waiting for DRA artifacts (timeout 
{TIMEOUT_SECONDS}s, poll {POLL_SECONDS}s)...") + print(f" staging: {want_staging!r} <= {staging_url}") + print(f" snapshot: {want_snapshot!r} <= {snapshot_url}") + + deadline = time.monotonic() + TIMEOUT_SECONDS + while time.monotonic() < deadline: + st = _fetch_version(staging_url) + sn = _fetch_version(snapshot_url) + if st == want_staging and sn == want_snapshot: + print("OK: staging and snapshot versions matched.") + return 0 + if st is not None or sn is not None: + print(f" staging={st!r} snapshot={sn!r} (still waiting)") + time.sleep(POLL_SECONDS) + + print("ERROR: timed out waiting for DRA artifact versions.", file=sys.stderr) + return 1 + + +if __name__ == "__main__": + sys.exit(main())