diff --git a/.buildkite/x-pack/elastic-agent/pipeline.xpack.elastic-agent.package.yml b/.buildkite/x-pack/elastic-agent/pipeline.xpack.elastic-agent.package.yml new file mode 100644 index 000000000000..ff81812043f5 --- /dev/null +++ b/.buildkite/x-pack/elastic-agent/pipeline.xpack.elastic-agent.package.yml @@ -0,0 +1,127 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json + +steps: + - input: "Build parameters" + if: build.env("ManifestURL") == null + fields: + - text: "ManifestURL" + key: "ManifestURL" + default: "" + required: true + hint: "Link to the build manifest URL." + - select: "Mage verbose" + key: "MAGEFILE_VERBOSE" + required: false + options: + - label: "True" + value: "1" + - label: "False" + value: "0" + hint: "Increase verbosity of the mage commands, defaults to False" + - select: "DRA Workflow" + key: "DRA_WORKFLOW" + required: true + options: + - label: "snapshot" + value: "snapshot" + - label: "staging" + value: "staging" + hint: "What workflow of the DRA release process this build is going to be triggered for" + - text: "DRA Version" + key: "DRA_VERSION" + required: true + default: "" + hint: "The packaging version to use" + - select: "DRA DRY-RUN" + key: "DRA_DRY_RUN" + required: false + options: + - label: "True" + value: "--dry-run" + - label: "False" + value: "" + hint: "If the DRA release manager script would actually publish anything or just print" + + - wait: ~ + if: build.env("ManifestURL") == null + + - group: ":Packaging Artefacts" + key: "package" + steps: + - label: "Package elastic-agent" + key: package_elastic-agent + command: | + if [[ -z "$${ManifestURL}" ]]; then + export ManifestURL=$(buildkite-agent meta-data get ManifestURL --default "") + if [[ -z "$${ManifestURL}" ]]; then + echo ":broken_heart: Missing ManifestURL variable or empty string provided" + echo "Provided ManifestURL: ${ManifestURL}" + exit 1 + fi + fi + if [[ -z "$${MAGEFILE_VERBOSE}" ]]; then + export MAGEFILE_VERBOSE=$(buildkite-agent meta-data get MAGEFILE_VERBOSE --default "0") + fi + .buildkite/x-pack/elastic-agent/scripts/steps/package.sh + artifact_paths: + - "x-pack/elastic-agent/build/distributions/**/*" + agents: + provider: "gcp" + image: "family/platform-ingest-beats-ubuntu-2204" + + - label: "Package ARM elastic-agent" + env: + PLATFORMS: "linux/arm64" + PACKAGES: "docker" + key: package_elastic-agent-arm + command: | + if [[ -z "$${ManifestURL}" ]]; then + export ManifestURL=$(buildkite-agent meta-data get ManifestURL --default "") + if [[ -z "$${ManifestURL}" ]]; then + echo ":broken_heart: Missing ManifestURL variable or empty string provided" + echo "Provided ManifestURL: ${ManifestURL}" + exit 1 + fi + fi + if [[ -z "$${MAGEFILE_VERBOSE}" ]]; then + export MAGEFILE_VERBOSE=$(buildkite-agent meta-data get MAGEFILE_VERBOSE --default "0") + fi + .buildkite/x-pack/elastic-agent/scripts/steps/package.sh + artifact_paths: + - "x-pack/elastic-agent/build/distributions/**/*" + agents: + provider: "aws" + instanceType: "t4g.2xlarge" + imagePrefix: "platform-ingest-beats-ubuntu-2004-aarch64" + # + - label: ":elastic-stack: Publishing to DRA" + key: dra-publish + depends_on: package + agents: + provider: "gcp" + env: + DRA_PROJECT_ID: "elastic-agent-package" + DRA_PROJECT_ARTIFACT_ID: "agent-package" + command: | + echo "+++ Restoring Artifacts" + mkdir build + buildkite-agent artifact download "x-pack/elastic-agent/build/**/*" . 
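+          # The artifact download above restores the packages produced by the "package" group steps
+          # (their artifact_paths point at x-pack/elastic-agent/build/distributions/**/*), so the
+          # release manager can pick them up from that directory.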
+ echo "+++ Changing permissions for the release manager" + sudo chown -R :1000 x-pack/elastic-agent/build/distributions/ + echo "+++ Running DRA publish step" + if [[ -z "${MAGEFILE_VERBOSE}" ]]; then + export MAGEFILE_VERBOSE=$(buildkite-agent meta-data get MAGEFILE_VERBOSE --default "0") + fi + if [[ -z "${DRA_DRY_RUN}" ]]; then + DRA_DRY_RUN=$(buildkite-agent meta-data get DRA_DRY_RUN --default "") + export DRA_DRY_RUN + fi + if [[ -z "${DRA_VERSION}" ]]; then + DRA_VERSION=$(buildkite-agent meta-data get DRA_VERSION --default "") + export DRA_VERSION + fi + if [[ -z "${DRA_WORKFLOW}" ]]; then + DRA_WORKFLOW=$(buildkite-agent meta-data get DRA_WORKFLOW --default "") + export DRA_WORKFLOW + fi + .buildkite/x-pack/elastic-agent/scripts/steps/dra-publish.sh diff --git a/.buildkite/x-pack/elastic-agent/scripts/steps/dra-publish.sh b/.buildkite/x-pack/elastic-agent/scripts/steps/dra-publish.sh new file mode 100755 index 000000000000..dc211189affe --- /dev/null +++ b/.buildkite/x-pack/elastic-agent/scripts/steps/dra-publish.sh @@ -0,0 +1,94 @@ +#!/bin/bash + +set -uo pipefail + +DRY_RUN="${DRA_DRY_RUN:=""}" +WORKFLOW="${DRA_WORKFLOW:=""}" +COMMIT="${DRA_COMMIT:="${BUILDKITE_COMMIT:=""}"}" +BRANCH="${DRA_BRANCH:="${BUILDKITE_BRANCH:=""}"}" +PACKAGE_VERSION="${DRA_VERSION:="${BEAT_VERSION:=""}"}" +CI_DRA_ROLE_PATH="kv/ci-shared/release/dra-role" + +# force main branch on PR's or it won't execute +# because the PR branch does not have a project folder in release-manager +if [[ "${BUILDKITE_PULL_REQUEST:="false"}" != "false" ]]; then + BRANCH=7.17 + DRY_RUN="--dry-run" + echo "+++ Running in PR or test branch and setting branch 7.17 and --dry-run" +fi + + + +if [[ -z "${WORKFLOW}" ]]; then + echo "+++ Missing DRA workflow"; + exit 1 +fi +if [[ -z "${COMMIT:-""}" ]]; then + echo "+++ Missing DRA_COMMIT"; + exit 1 +fi +if [[ -z "${PACKAGE_VERSION:-""}" ]]; then + echo "+++ Missing DRA_VERSION"; + exit 1 +fi +if [[ -z "${BRANCH:-""}" ]]; then + echo "+++ Missing DRA_BRANCH"; + exit 1 +fi + +function release_manager_login { + DRA_CREDS_SECRET=$(retry 5 vault kv get -field=data -format=json ${CI_DRA_ROLE_PATH}) + VAULT_ADDR_SECRET=$(echo ${DRA_CREDS_SECRET} | jq -r '.vault_addr') + VAULT_ROLE_ID_SECRET=$(echo ${DRA_CREDS_SECRET} | jq -r '.role_id') + VAULT_SECRET=$(echo ${DRA_CREDS_SECRET} | jq -r '.secret_id') + export VAULT_ADDR_SECRET VAULT_ROLE_ID_SECRET VAULT_SECRET +} + +# Listing Release manager +function run_release_manager_list() { + local _project_id="${1}" _artifact_set="${2}" _workflow="${3}" _commit="${4}" _branch="${5}" _version="${6}" + echo "+++ :hammer_and_pick: Release manager listing ${_branch} ${_workflow} DRA artifacts..." + docker run --rm \ + --name release-manager \ + -e VAULT_ADDR="${VAULT_ADDR_SECRET}" \ + -e VAULT_ROLE_ID="${VAULT_ROLE_ID_SECRET}" \ + -e VAULT_SECRET_ID="${VAULT_SECRET}" \ + --mount type=bind,readonly=false,src="${PWD}",target=/artifacts \ + docker.elastic.co/infra/release-manager:latest \ + cli list \ + --project "${_project_id}" \ + --branch "${_branch}" \ + --commit "${_commit}" \ + --workflow "${_workflow}" \ + --version "${_version}" \ + --artifact-set "${_artifact_set}" +} + +# Publish DRA artifacts +function run_release_manager_collect() { + local _project_id="${1}" _artifact_set="${2}" _workflow="${3}" _commit="${4}" _branch="${5}" _version="${6}" _dry_run="${7}" + echo "+++ :hammer_and_pick: Publishing ${_branch} ${_workflow} DRA artifacts..." 
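+  # Example invocation, mirroring the call at the bottom of this script:
+  #   run_release_manager_collect "${DRA_PROJECT_ID}" "${DRA_PROJECT_ARTIFACT_ID}" "${WORKFLOW}" "${COMMIT}" "${BRANCH}" "${PACKAGE_VERSION}" "${DRY_RUN}"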
+ docker run --rm \ + --name release-manager \ + -e VAULT_ADDR="${VAULT_ADDR_SECRET}" \ + -e VAULT_ROLE_ID="${VAULT_ROLE_ID_SECRET}" \ + -e VAULT_SECRET_ID="${VAULT_SECRET}" \ + --mount type=bind,readonly=false,src="${PWD}",target=/artifacts \ + docker.elastic.co/infra/release-manager:latest \ + cli collect \ + --project "${_project_id}" \ + --branch "${_branch}" \ + --commit "${_commit}" \ + --workflow "${_workflow}" \ + --version "${_version}" \ + --artifact-set "${_artifact_set}" \ + ${_dry_run} +} + +echo "+++ Release Manager ${WORKFLOW} / ${BRANCH} / ${COMMIT}"; +release_manager_login +run_release_manager_list "${DRA_PROJECT_ID}" "${DRA_PROJECT_ARTIFACT_ID}" "${WORKFLOW}" "${COMMIT}" "${BRANCH}" "${PACKAGE_VERSION}" +run_release_manager_collect "${DRA_PROJECT_ID}" "${DRA_PROJECT_ARTIFACT_ID}" "${WORKFLOW}" "${COMMIT}" "${BRANCH}" "${PACKAGE_VERSION}" "${DRY_RUN}" +RM_EXIT_CODE=$? + +exit $RM_EXIT_CODE diff --git a/.buildkite/x-pack/elastic-agent/scripts/steps/package.sh b/.buildkite/x-pack/elastic-agent/scripts/steps/package.sh new file mode 100755 index 000000000000..a9c0d3763de4 --- /dev/null +++ b/.buildkite/x-pack/elastic-agent/scripts/steps/package.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash +set -uo pipefail +source ".buildkite/x-pack/elastic-agent/scripts/steps/setenv.sh" +source ".buildkite/x-pack/elastic-agent/scripts/steps/common.sh" + +DIST_PATH="x-pack/elastic-agent/build/distributions" + +set -x +if test -z "${ManifestURL=:""}"; then + echo "Missing variable ManifestURL, export it before use." + exit 2 +fi + +VERSION="$(make get-version)" +echo "--- Packaging Elastic Agent" + +echo $ManifestURL + +export AGENT_DROP_PATH=build/elastic-agent-drop +mkdir -p $AGENT_DROP_PATH + +mage -v -d x-pack/elastic-agent clean downloadManifest package ironbank fixDRADockerArtifacts + +echo "+++ Generate dependencies report" +BEAT_VERSION_FULL=$(curl -s -XGET "${ManifestURL}" |jq '.version' -r ) +bash dev-tools/dependencies-report +mkdir -p $DIST_PATH/reports +mv dependencies.csv "$DIST_PATH/reports/dependencies-${BEAT_VERSION_FULL}.csv" \ No newline at end of file diff --git a/.buildkite/x-pack/elastic-agent/scripts/steps/setenv.sh b/.buildkite/x-pack/elastic-agent/scripts/steps/setenv.sh new file mode 100644 index 000000000000..1af666be9838 --- /dev/null +++ b/.buildkite/x-pack/elastic-agent/scripts/steps/setenv.sh @@ -0,0 +1,56 @@ +#!/usr/bin/env bash + +set -euo pipefail + +SETUP_GVM_VERSION="v0.5.1" +DOCKER_COMPOSE_VERSION="1.21.0" +DOCKER_COMPOSE_VERSION_AARCH64="v2.21.0" +SETUP_WIN_PYTHON_VERSION="3.11.0" +NMAP_WIN_VERSION="7.12" # Earlier versions of NMap provide WinPcap (the winpcap packages don't install nicely because they pop-up a UI) +GO_VERSION=$(cat .go-version) +ASDF_MAGE_VERSION="1.15.0" +PACKAGING_PLATFORMS="+all linux/amd64 linux/arm64 windows/amd64 darwin/amd64 darwin/arm64" +PACKAGING_ARM_PLATFORMS="linux/arm64" + +export SETUP_GVM_VERSION +export DOCKER_COMPOSE_VERSION +export DOCKER_COMPOSE_VERSION_AARCH64 +export SETUP_WIN_PYTHON_VERSION +export NMAP_WIN_VERSION +export GO_VERSION +export ASDF_MAGE_VERSION +export PACKAGING_PLATFORMS +export PACKAGING_ARM_PLATFORMS + +exportVars() { + local platform_type="$(uname)" + local arch_type="$(uname -m)" + if [ "${arch_type}" == "x86_64" ]; then + case "${platform_type}" in + Linux|Darwin) + export GOX_FLAGS="-arch amd64" + export testResults="**/build/TEST*.xml" + export artifacts="**/build/TEST*.out" + ;; + MINGW*) + export GOX_FLAGS="-arch 386" + export testResults="**\\build\\TEST*.xml" + export 
artifacts="**\\build\\TEST*.out" + ;; + esac + elif [[ "${arch_type}" == "aarch64" || "${arch_type}" == "arm64" ]]; then + export GOX_FLAGS="-arch arm" + export testResults="**/build/TEST*.xml" + export artifacts="**/build/TEST*.out" + else + echo "Unsupported OS" + fi +} + + +if [[ "$BUILDKITE_PIPELINE_SLUG" == "beats-metricbeat" ]]; then + exportVars + export RACE_DETECTOR="true" + export TEST_COVERAGE="true" + export DOCKER_PULL="0" +fi \ No newline at end of file diff --git a/dev-tools/mage/artifacts/artifacts_api.go b/dev-tools/mage/artifacts/artifacts_api.go new file mode 100644 index 000000000000..5a8d67643bba --- /dev/null +++ b/dev-tools/mage/artifacts/artifacts_api.go @@ -0,0 +1,285 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package artifacts + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "sort" + + "github.com/elastic/beats/v7/dev-tools/mage/version" +) + +const ( + defaultArtifactAPIURL = "https://artifacts-api.elastic.co/" + + artifactsAPIV1VersionsEndpoint = "v1/versions/" + artifactsAPIV1VersionBuildsEndpoint = "v1/versions/%s/builds/" + artifactAPIV1BuildDetailsEndpoint = "v1/versions/%s/builds/%s" + // artifactAPIV1SearchVersionPackage = "v1/search/%s/%s" +) + +var ( + ErrLatestVersionNil = errors.New("latest version is nil") + ErrSnapshotVersionsEmpty = errors.New("snapshot list is nil") + ErrInvalidVersionRetrieved = errors.New("invalid version retrieved from artifact API") + + ErrBadHTTPStatusCode = errors.New("bad http status code") +) + +type Manifests struct { + LastUpdateTime string `json:"last-update-time"` + SecondsSinceLastUpdate int `json:"seconds-since-last-update"` +} + +type VersionList struct { + Versions []string `json:"versions"` + Aliases []string `json:"aliases"` + Manifests Manifests `json:"manifests"` +} + +type VersionBuilds struct { + Builds []string `json:"builds"` + Manifests Manifests `json:"manifests"` +} + +type Package struct { + URL string `json:"url"` + ShaURL string `json:"sha_url"` + AscURL string `json:"asc_url"` + Type string `json:"type"` + Architecture string `json:"architecture"` + Os []string `json:"os"` + Classifier string `json:"classifier"` + Attributes struct { + IncludeInRepo string `json:"include_in_repo"` + ArtifactNoKpi string `json:"artifactNoKpi"` + Internal string `json:"internal"` + ArtifactID string `json:"artifact_id"` + Oss string `json:"oss"` + Group string `json:"group"` + } `json:"attributes"` +} + +type Dependency struct { + Prefix string `json:"prefix"` + BuildUri string `json:"build_uri"` +} + +type Project struct { + Branch string `json:"branch"` + CommitHash string `json:"commit_hash"` + CommitURL string `json:"commit_url"` + ExternalArtifactsManifestURL string `json:"external_artifacts_manifest_url"` + BuildDurationSeconds int `json:"build_duration_seconds"` + Packages map[string]Package `json:"packages"` + Dependencies []Dependency `json:"dependencies"` +} + +type Build struct { + Projects map[string]Project `json:"projects"` + StartTime string `json:"start_time"` + ReleaseBranch string `json:"release_branch"` + Prefix string `json:"prefix"` + EndTime string `json:"end_time"` + ManifestVersion string `json:"manifest_version"` + Version string `json:"version"` + Branch string `json:"branch"` + BuildID string `json:"build_id"` + BuildDurationSeconds int 
`json:"build_duration_seconds"` +} + +type BuildDetails struct { + Build Build + Manifests Manifests `json:"manifests"` +} + +type SearchPackageResult struct { + Packages map[string]Package `json:"packages"` + Manifests Manifests `json:"manifests"` +} + +type httpDoer interface { + Do(req *http.Request) (*http.Response, error) +} + +type ArtifactAPIClientOpt func(aac *ArtifactAPIClient) + +func WithUrl(url string) ArtifactAPIClientOpt { + return func(aac *ArtifactAPIClient) { aac.url = url } +} + +func WithHttpClient(client httpDoer) ArtifactAPIClientOpt { + return func(aac *ArtifactAPIClient) { aac.c = client } +} + +// ArtifactAPIClient is a small (and incomplete) client for the Elastic artifact API. +// More information about the API can be found at https://artifacts-api.elastic.co/v1 +// which will print a list of available operations +type ArtifactAPIClient struct { + c httpDoer + url string +} + +// NewArtifactAPIClient creates a new Artifact API client +func NewArtifactAPIClient(opts ...ArtifactAPIClientOpt) *ArtifactAPIClient { + c := &ArtifactAPIClient{ + url: defaultArtifactAPIURL, + c: new(http.Client), + } + + for _, opt := range opts { + opt(c) + } + + return c +} + +// GetVersions returns a list of versions as server by the Artifact API along with some aliases and manifest information +func (aac ArtifactAPIClient) GetVersions(ctx context.Context) (list *VersionList, err error) { + joinedURL, err := aac.composeURL(artifactsAPIV1VersionsEndpoint) + if err != nil { + return nil, fmt.Errorf("couldn't compose URL: %w", err) + } + + resp, err := aac.createAndPerformRequest(ctx, joinedURL) + if err != nil { + return nil, fmt.Errorf("getting versions: %w", err) + } + defer resp.Body.Close() + + return checkResponseAndUnmarshal[VersionList](resp) +} + +// GetBuildsForVersion returns a list of builds for a specific version. +// version should be one of the version strings returned by the GetVersions (expected format is semver +// with optional prerelease but no build metadata, for example 8.9.0-SNAPSHOT) +func (aac ArtifactAPIClient) GetBuildsForVersion(ctx context.Context, version string) (builds *VersionBuilds, err error) { + joinedURL, err := aac.composeURL(fmt.Sprintf(artifactsAPIV1VersionBuildsEndpoint, version)) + if err != nil { + return nil, fmt.Errorf("couldn't compose URL: %w", err) + } + + resp, err := aac.createAndPerformRequest(ctx, joinedURL) + if err != nil { + return nil, fmt.Errorf("getting builds for version %s: %w", version, err) + } + defer resp.Body.Close() + + return checkResponseAndUnmarshal[VersionBuilds](resp) +} + +// GetBuildDetails returns the list of project and artifacts related to a specific build. 
+// Version parameter format follows semver (without build metadata) and buildID format is ..- as returned by +// GetBuildsForVersion() +func (aac ArtifactAPIClient) GetBuildDetails(ctx context.Context, version string, buildID string) (buildDetails *BuildDetails, err error) { + joinedURL, err := aac.composeURL(fmt.Sprintf(artifactAPIV1BuildDetailsEndpoint, version, buildID)) + if err != nil { + return nil, fmt.Errorf("couldn't compose URL: %w", err) + } + + resp, err := aac.createAndPerformRequest(ctx, joinedURL) + if err != nil { + return nil, fmt.Errorf("getting build details for version %s buildID %s: %w", version, buildID, err) + } + defer resp.Body.Close() + + return checkResponseAndUnmarshal[BuildDetails](resp) +} + +func (aac ArtifactAPIClient) composeURL(relativePath string) (string, error) { + joinedURL, err := url.JoinPath(aac.url, relativePath) + if err != nil { + return "", fmt.Errorf("composing URL with %q %q: %w", aac.url, relativePath, err) + } + + return joinedURL, nil +} + +func (aac ArtifactAPIClient) createAndPerformRequest(ctx context.Context, URL string) (*http.Response, error) { + req, err := http.NewRequestWithContext(ctx, http.MethodGet, URL, nil) + if err != nil { + err = fmt.Errorf("composing request: %w", err) + return nil, err + } + + resp, err := aac.c.Do(req) + if err != nil { + return nil, fmt.Errorf("executing http request %v: %w", req, err) + } + + return resp, nil +} + +func checkResponseAndUnmarshal[T any](resp *http.Response) (*T, error) { + if resp.StatusCode != http.StatusOK { + return nil, fmt.Errorf("%d: %w", resp.StatusCode, ErrBadHTTPStatusCode) + } + + respBytes, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("reading response body: %w", err) + } + result := new(T) + err = json.Unmarshal(respBytes, result) + + if err != nil { + return nil, fmt.Errorf("unmarshaling: %w", err) + } + + return result, nil +} + +type logger interface { + Logf(format string, args ...any) +} + +func (aac ArtifactAPIClient) GetLatestSnapshotVersion(ctx context.Context, log logger) (*version.ParsedSemVer, error) { + vList, err := aac.GetVersions(ctx) + if err != nil { + return nil, err + } + + if vList == nil { + return nil, ErrSnapshotVersionsEmpty + } + + sortedParsedVersions := make(version.SortableParsedVersions, 0, len(vList.Versions)) + for _, v := range vList.Versions { + pv, err := version.ParseVersion(v) + if err != nil { + log.Logf("invalid version retrieved from artifact API: %q", v) + return nil, ErrInvalidVersionRetrieved + } + sortedParsedVersions = append(sortedParsedVersions, pv) + } + + if len(sortedParsedVersions) == 0 { + return nil, ErrSnapshotVersionsEmpty + } + + // normally the output of the versions returned by artifact API is already + // sorted in ascending order.If we want to sort in descending order we need + // to pass a sort.Reverse to sort.Sort. 
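+	// e.g. ["8.8.2", "8.9.0-SNAPSHOT", "8.9.0"] sorts (descending) to
+	// ["8.9.0", "8.9.0-SNAPSHOT", "8.8.2"], so the first element for which
+	// IsSnapshot() is true is the latest snapshot.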
+ sort.Sort(sort.Reverse(sortedParsedVersions)) + + var latestSnapshotVersion *version.ParsedSemVer + // fetch the latest SNAPSHOT build + for _, pv := range sortedParsedVersions { + if pv.IsSnapshot() { + latestSnapshotVersion = pv + break + } + } + if latestSnapshotVersion == nil { + return nil, ErrLatestVersionNil + } + return latestSnapshotVersion, nil +} diff --git a/dev-tools/mage/crossbuild.go b/dev-tools/mage/crossbuild.go index f59dc2e710e3..9bb5fed49f1f 100644 --- a/dev-tools/mage/crossbuild.go +++ b/dev-tools/mage/crossbuild.go @@ -30,7 +30,6 @@ import ( "github.com/magefile/mage/mg" "github.com/magefile/mage/sh" - "github.com/pkg/errors" "github.com/elastic/beats/v7/dev-tools/mage/gotool" "github.com/elastic/beats/v7/libbeat/common/file" @@ -43,7 +42,7 @@ const defaultCrossBuildTarget = "golangCrossBuild" // See NewPlatformList for details about platform filtering expressions. var Platforms = BuildPlatforms.Defaults() -// Types is the list of package types +// SelectedPackageTypes Types is the list of package types var SelectedPackageTypes []PackageType func init() { @@ -136,6 +135,7 @@ func CrossBuild(options ...CrossBuildOption) error { // Docker is required for this target. if err := HaveDocker(); err != nil { + log.Printf("No docker found: %v\n", err) return err } @@ -162,8 +162,8 @@ func CrossBuild(options ...CrossBuildOption) error { builder := GolangCrossBuilder{buildPlatform.Name, params.Target, params.InDir, params.ImageSelector} if params.Serial { if err := builder.Build(); err != nil { - return errors.Wrapf(err, "failed cross-building target=%v for platform=%v %v", params.ImageSelector, - params.Target, buildPlatform.Name) + return fmt.Errorf("failed cross-building target=%v for platform=%v %v. Err: %w", params.ImageSelector, + params.Target, buildPlatform.Name, err) } } else { deps = append(deps, builder.Build) @@ -244,11 +244,11 @@ type GolangCrossBuilder struct { // Build executes the build inside of Docker. func (b GolangCrossBuilder) Build() error { - fmt.Printf(">> %v: Building for %v\n", b.Target, b.Platform) + log.Printf(">> %v: Building for %v\n", b.Target, b.Platform) repoInfo, err := GetProjectRepoInfo() if err != nil { - return errors.Wrap(err, "failed to determine repo root and package sub dir") + return fmt.Errorf("failed to determine repo root and package sub dir. Err: %w", err) } mountPoint := filepath.ToSlash(filepath.Join("/go", "src", repoInfo.CanonicalRootImportPath)) @@ -262,13 +262,13 @@ func (b GolangCrossBuilder) Build() error { builderArch := runtime.GOARCH buildCmd, err := filepath.Rel(workDir, filepath.Join(mountPoint, repoInfo.SubDir, "build/mage-linux-"+builderArch)) if err != nil { - return errors.Wrap(err, "failed to determine mage-linux-"+builderArch+" relative path") + return fmt.Errorf("failed to determine mage-linux-"+builderArch+" relative path. Err: %w", err) } dockerRun := sh.RunCmd("docker", "run") image, err := b.ImageSelector(b.Platform) if err != nil { - return errors.Wrap(err, "failed to determine golang-crossbuild image tag") + return fmt.Errorf("failed to determine golang-crossbuild image tag. 
Err: %w", err) } verbose := "" if mg.Verbose() { @@ -336,7 +336,7 @@ func chownPaths(uid, gid int, path string) error { start := time.Now() numFixed := 0 defer func() { - log.Printf("chown took: %v, changed %d files", time.Now().Sub(start), numFixed) + log.Printf("chown took: %v, changed %d files", time.Since(start), numFixed) }() return filepath.Walk(path, func(name string, info os.FileInfo, err error) error { @@ -357,7 +357,7 @@ func chownPaths(uid, gid int, path string) error { } if err := os.Chown(name, uid, gid); err != nil { - return errors.Wrapf(err, "failed to chown path=%v", name) + return fmt.Errorf("failed to chown path=%v. Err: %w", name, err) } numFixed++ return nil diff --git a/dev-tools/mage/manifest/manifest.go b/dev-tools/mage/manifest/manifest.go new file mode 100644 index 000000000000..8fa7fe779521 --- /dev/null +++ b/dev-tools/mage/manifest/manifest.go @@ -0,0 +1,162 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package manifest + +import ( + "context" + "errors" + "fmt" + "log" + "net/url" + "os" + "path" + "path/filepath" + "time" + + artifacts "github.com/elastic/beats/v7/dev-tools/mage/artifacts" + + "github.com/magefile/mage/mg" + "golang.org/x/sync/errgroup" +) + +// A backoff schedule for when and how often to retry failed HTTP +// requests. The first element is the time to wait after the +// first failure, the second the time to wait after the second +// failure, etc. After reaching the last element, retries stop +// and the request is considered failed. +var backoffSchedule = []time.Duration{ + 1 * time.Second, + 3 * time.Second, + 10 * time.Second, +} + +var errorInvalidManifestURL = errors.New("invalid ManifestURL provided") +var errorNotAllowedManifestURL = errors.New("the provided ManifestURL is not allowed URL") + +var AllowedManifestHosts = []string{"snapshots.elastic.co", "staging.elastic.co", "artifacts-staging.elastic.co"} + +// DownloadManifest is going to download the given manifest file and return the ManifestResponse +func DownloadManifest(manifest string) (artifacts.Build, error) { + log.Printf(">>>> Downloading manifest %s", manifest) + manifestUrl, urlError := url.Parse(manifest) + if urlError != nil { + return artifacts.Build{}, errorInvalidManifestURL + } + var valid = false + for _, manifestHost := range AllowedManifestHosts { + if manifestHost == manifestUrl.Host { + valid = true + break + } + } + if !valid { + log.Printf("Not allowed %s, valid ones are %+v", manifestUrl.Host, AllowedManifestHosts) + return artifacts.Build{}, errorNotAllowedManifestURL + } + sanitizedUrl := fmt.Sprintf("https://%s%s", manifestUrl.Host, manifestUrl.Path) + f := func() (artifacts.Build, error) { return downloadManifestData(sanitizedUrl) } + manifestResponse, err := doWithRetries(f) + if err != nil { + return artifacts.Build{}, fmt.Errorf("downloading manifest: %w", err) + } + + log.Printf(">>>> Downloaded manifest %s", manifest) + log.Printf(">>>> Packaing version: %s, build_id: %s, manifest_version:%s", manifestResponse.Version, manifestResponse.BuildID, manifestResponse.ManifestVersion) + + return manifestResponse, nil +} + +func resolveManifestPackage(project artifacts.Project, pkg string, reqPackage string, version string) []string { + packageName := fmt.Sprintf("%s-%s-%s", pkg, version, reqPackage) + val, ok := project.Packages[packageName] + if !ok { + return 
nil + } + if mg.Verbose() { + log.Printf(">>>>>>>>>>> Project branch/commit [%s, %s]", project.Branch, project.CommitHash) + } + return []string{val.URL, val.ShaURL, val.AscURL} + +} + +// DownloadComponentsFromManifest is going to download a set of components from the given manifest into the destination +// dropPath folder in order to later use that folder for packaging +func DownloadComponentsFromManifest(manifest string, platforms []string, platformPackages map[string]string, dropPath string) error { + componentSpec := map[string][]string{ + "apm-server": {"apm-server"}, + "beats": {"auditbeat", "filebeat", "heartbeat", "metricbeat", "osquerybeat", "packetbeat"}, + "cloud-defend": {"cloud-defend"}, + "cloudbeat": {"cloudbeat"}, + "elastic-agent-shipper": {"elastic-agent-shipper"}, + "endpoint-dev": {"endpoint-security"}, + "fleet-server": {"fleet-server"}, + "prodfiler": {"pf-elastic-collector", "pf-elastic-symbolizer", "pf-host-agent"}, + } + + manifestResponse, err := DownloadManifest(manifest) + if err != nil { + return fmt.Errorf("failed to download remote manifest file %w", err) + } + projects := manifestResponse.Projects + + errGrp, downloadsCtx := errgroup.WithContext(context.Background()) + for component, pkgs := range componentSpec { + for _, platform := range platforms { + targetPath := filepath.Join(dropPath) + err := os.MkdirAll(targetPath, 0755) + if err != nil { + return fmt.Errorf("failed to create directory %s", targetPath) + } + log.Printf("+++ Prepare to download project [%s] for [%s]", component, platform) + + for _, pkg := range pkgs { + reqPackage := platformPackages[platform] + pkgURL := resolveManifestPackage(projects[component], pkg, reqPackage, manifestResponse.Version) + if pkgURL != nil { + for _, p := range pkgURL { + log.Printf(">>>>>>>>> Downloading [%s] [%s] ", pkg, p) + pkgFilename := path.Base(p) + downloadTarget := filepath.Join(targetPath, pkgFilename) + if _, err := os.Stat(downloadTarget); err != nil { + errGrp.Go(func(ctx context.Context, url, target string) func() error { + return func() error { return downloadPackage(ctx, url, target) } + }(downloadsCtx, p, downloadTarget)) + } + } + } else if mg.Verbose() { + log.Printf(">>>>>>>>> Project [%s] does not have [%s] ", pkg, platform) + } + } + } + } + + err = errGrp.Wait() + if err != nil { + return fmt.Errorf("error downloading files: %w", err) + } + + log.Printf("Downloads for manifest %q complete.", manifest) + return nil +} + +func downloadPackage(ctx context.Context, downloadUrl string, target string) error { + parsedURL, errorUrl := url.Parse(downloadUrl) + if errorUrl != nil { + return errorInvalidManifestURL + } + var valid = false + for _, manifestHost := range AllowedManifestHosts { + if manifestHost == parsedURL.Host { + valid = true + } + } + if !valid { + log.Printf("Not allowed %s, valid ones are %+v", parsedURL.Host, AllowedManifestHosts) + return errorNotAllowedManifestURL + } + cleanUrl := fmt.Sprintf("https://%s%s", parsedURL.Host, parsedURL.Path) + _, err := doWithRetries(func() (string, error) { return downloadFile(ctx, cleanUrl, target) }) + return err +} diff --git a/dev-tools/mage/manifest/manifestspecs.go b/dev-tools/mage/manifest/manifestspecs.go new file mode 100644 index 000000000000..31dcafb5c3a1 --- /dev/null +++ b/dev-tools/mage/manifest/manifestspecs.go @@ -0,0 +1,91 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. 
Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package manifest + +import ( + "context" + "encoding/json" + "fmt" + "io" + "log" + "net/http" + "os" + "time" + + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/v7/dev-tools/mage/artifacts" +) + +func doWithRetries[T any](f func() (T, error)) (T, error) { + var err error + var resp T + for _, backoff := range backoffSchedule { + resp, err = f() + if err == nil { + return resp, nil + } + if mg.Verbose() { + log.Printf("Request error: %+v\n", err) + log.Printf("Retrying in %v\n", backoff) + } + time.Sleep(backoff) + } + + // All retries failed + return resp, err +} + +func downloadFile(ctx context.Context, url string, filepath string) (path string, err error) { + outFile, fileErr := os.Create(filepath) + if fileErr != nil { + return "", fmt.Errorf("failed to create destination file %w", fileErr) + } + defer func() { + err = outFile.Close() + }() + + req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil) + if err != nil { + return "", fmt.Errorf("error creating request for %q: %w", url, err) + } + + resp, reqErr := http.DefaultClient.Do(req) + if reqErr != nil { + return filepath, fmt.Errorf("failed to download manifest [%s]\n %w", url, err) + } + defer func() { + err = resp.Body.Close() + }() + + _, errCopy := io.Copy(outFile, resp.Body) + if errCopy != nil { + return "", fmt.Errorf("failed to decode manifest response [%s]\n %w", url, err) + } + if mg.Verbose() { + log.Printf("<<<<<<<<< Downloaded: %s to %s", url, filepath) + } + + return outFile.Name(), nil +} + +func downloadManifestData(url string) (artifacts.Build, error) { + var response artifacts.Build + resp, err := http.Get(url) //nolint // we should have already verified that this is a proper valid url + if err != nil { + return response, fmt.Errorf("failed to download manifest [%s]\n %w", url, err) + } + defer func() { + if err := resp.Body.Close(); err != nil { + panic(err) + } + }() + + err = json.NewDecoder(resp.Body).Decode(&response) + if err != nil { + return response, fmt.Errorf("failed to decode manifest response [%s]\n %w", url, err) + } + return response, nil +} diff --git a/dev-tools/mage/pkg.go b/dev-tools/mage/pkg.go index 5827c806d8b8..761249de285a 100644 --- a/dev-tools/mage/pkg.go +++ b/dev-tools/mage/pkg.go @@ -18,17 +18,18 @@ package mage import ( + "errors" "fmt" "log" "os" "path/filepath" + "regexp" "runtime" "strconv" "strings" "github.com/magefile/mage/mg" "github.com/magefile/mage/sh" - "github.com/pkg/errors" ) // Package packages the Beat for distribution. 
It generates packages based on @@ -203,14 +204,14 @@ func saveIronbank() error { ironbank := getIronbankContextName() buildDir := filepath.Join("build", ironbank) if _, err := os.Stat(buildDir); os.IsNotExist(err) { - return fmt.Errorf("cannot find the folder with the ironbank context: %+v", err) + return fmt.Errorf("cannot find the folder with the ironbank context: %w", err) } distributionsDir := "build/distributions" if _, err := os.Stat(distributionsDir); os.IsNotExist(err) { err := os.MkdirAll(distributionsDir, 0750) if err != nil { - return fmt.Errorf("cannot create folder for docker artifacts: %+v", err) + return fmt.Errorf("cannot create folder for docker artifacts: %w", err) } } tarGzFile := filepath.Join(distributionsDir, ironbank+".tar.gz") @@ -218,10 +219,15 @@ func saveIronbank() error { // Save the build context as tar.gz artifact err := TarWithOptions(buildDir, tarGzFile, true) if err != nil { - return fmt.Errorf("cannot compress the tar.gz file: %+v", err) + return fmt.Errorf("cannot compress the tar.gz file: %w", err) } - return errors.Wrap(CreateSHA512File(tarGzFile), "failed to create .sha512 file") + err = CreateSHA512File(tarGzFile) + if err != nil { + return fmt.Errorf("failed to create the sha512 file: %w", err) + } + + return nil } // isPackageTypeSelected returns true if SelectedPackageTypes is empty or if @@ -248,8 +254,11 @@ type packageBuilder struct { func (b packageBuilder) Build() error { fmt.Printf(">> package: Building %v type=%v for platform=%v\n", b.Spec.Name, b.Type, b.Platform.Name) log.Printf("Package spec: %+v", b.Spec) - return errors.Wrapf(b.Type.Build(b.Spec), "failed building %v type=%v for platform=%v", - b.Spec.Name, b.Type, b.Platform.Name) + err := b.Type.Build(b.Spec) + if err != nil { + return fmt.Errorf("failed building %v type=%v for platform=%v: %w", err) + } + return nil } type testPackagesParams struct { @@ -387,3 +396,52 @@ func TestBinaryGLIBCVersion(elfPath, maxGlibcVersion string) error { fmt.Printf(">> testBinaryGLIBCVersion: %q requires glibc %v or greater\n", elfPath, requiredGlibc) return nil } + +// FixDRADockerArtifacts is a workaround for the DRA artifacts produced by the package target. We had to do +// because the initial unified release manager DSL code required specific names that the package does not produce, +// we wanted to keep backwards compatibility with the artifacts of the unified release and the DRA. +// this follows the same logic as https://github.com/elastic/beats/blob/2fdefcfbc783eb4710acef07d0ff63863fa00974/.ci/scripts/prepare-release-manager.sh +func FixDRADockerArtifacts() error { + fmt.Println("--- Fixing Docker DRA artifacts") + distributionsPath := filepath.Join("build", "distributions") + // Find all the files with the given name + matches, err := filepath.Glob(filepath.Join(distributionsPath, "*docker.tar.gz*")) + if err != nil { + return err + } + if mg.Verbose() { + log.Printf("--- Found artifacts to rename %s %d", distributionsPath, len(matches)) + } + // Match the artifact name and break down into groups so that we can reconstruct the names as its expected by the DRA DSL + // As SNAPSHOT keyword or BUILDID are optional, capturing the separator - or + with the value. 
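+	// For example, a (hypothetical) artifact named elastic-agent-8.9.0-SNAPSHOT-linux-amd64.docker.tar.gz
+	// is renamed by the capture groups and the Sprintf below to
+	// elastic-agent-8.9.0-SNAPSHOT-docker-image-linux-amd64.tar.gz.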
+ artifactRegexp, err := regexp.Compile(`([\w+-]+)-(([0-9]+)\.([0-9]+)\.([0-9]+))([-|\+][\w]+)?-([\w]+)-([\w]+)\.([\w]+)\.([\w.]+)`) + if err != nil { + return err + } + for _, m := range matches { + artifactFile, err := os.Stat(m) + if err != nil { + return fmt.Errorf("failed stating file: %w", err) + } + if artifactFile.IsDir() { + continue + } + match := artifactRegexp.FindAllStringSubmatch(artifactFile.Name(), -1) + // The groups here is tightly coupled with the regexp above. + // match[0][6] already contains the separator so no need to add before the variable + targetName := fmt.Sprintf("%s-%s%s-%s-image-%s-%s.%s", match[0][1], match[0][2], match[0][6], match[0][9], match[0][7], match[0][8], match[0][10]) + if mg.Verbose() { + fmt.Printf("%#v\n", match) + fmt.Printf("Artifact: %s \n", artifactFile.Name()) + fmt.Printf("Renamed: %s \n", targetName) + } + renameErr := os.Rename(filepath.Join(distributionsPath, artifactFile.Name()), filepath.Join(distributionsPath, targetName)) + if renameErr != nil { + return renameErr + } + if mg.Verbose() { + fmt.Println("Renamed artifact") + } + } + return nil +} diff --git a/dev-tools/mage/platforms.go b/dev-tools/mage/platforms.go index 4be617ada83d..a2211ba999a8 100644 --- a/dev-tools/mage/platforms.go +++ b/dev-tools/mage/platforms.go @@ -18,10 +18,9 @@ package mage import ( + "fmt" "sort" "strings" - - "github.com/pkg/errors" ) // BuildPlatforms is a list of GOOS/GOARCH pairs supported by Go. @@ -202,6 +201,15 @@ func (p PlatformAttributes) String() string { // BuildPlatformList is a list of BuildPlatforms that supports filtering. type BuildPlatformList []BuildPlatform +// Returns all BuildPlatform names +func (list BuildPlatformList) Names() []string { + platforms := make([]string, len(list)) + for i, bp := range list { + platforms[i] = bp.Name + } + return platforms +} + // Get returns the BuildPlatform matching the given name. func (list BuildPlatformList) Get(name string) (BuildPlatform, bool) { for _, bp := range list { @@ -317,7 +325,7 @@ func newPlatformExpression(expr string) (*platformExpression, error) { } if !valid { - return nil, errors.Errorf("invalid platform in expression: %v", name) + return nil, fmt.Errorf("invalid platform in expression: %v", name) } } @@ -411,7 +419,7 @@ func (list BuildPlatformList) Filter(expr string) BuildPlatformList { return list } if len(pe.Add) > 0 { - panic(errors.Errorf("adds (%v) cannot be used in filter expressions", + panic(fmt.Errorf("adds (%v) cannot be used in filter expressions", strings.Join(pe.Add, ", "))) } diff --git a/dev-tools/mage/settings.go b/dev-tools/mage/settings.go index 1b6e07f7bac4..75112ca149c1 100644 --- a/dev-tools/mage/settings.go +++ b/dev-tools/mage/settings.go @@ -84,6 +84,11 @@ var ( versionQualified bool versionQualifier string + // PackagingFromManifest This value is set to true when we have defined a ManifestURL variable + PackagingFromManifest bool + // ManifestURL Location of the manifest file to package + ManifestURL string + FuncMap = map[string]interface{}{ "beat_doc_branch": BeatDocBranch, "beat_version": BeatQualifiedVersion, @@ -126,6 +131,8 @@ func init() { } versionQualifier, versionQualified = os.LookupEnv("VERSION_QUALIFIER") + ManifestURL = EnvOr("ManifestURL", "") + PackagingFromManifest = ManifestURL != "" } // ProjectType specifies the type of project (OSS vs X-Pack). 
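Note: the artifact API client added in dev-tools/mage/artifacts above is not exercised anywhere else in this change, so here is a minimal usage sketch. It assumes network access to artifacts-api.elastic.co and uses only functions introduced in artifacts_api.go; the stdLogger adapter is a hypothetical helper defined purely for illustration.

    package main

    import (
    	"context"
    	"log"

    	"github.com/elastic/beats/v7/dev-tools/mage/artifacts"
    )

    // stdLogger adapts the standard library logger to the small logger
    // interface (Logf) that GetLatestSnapshotVersion expects.
    type stdLogger struct{}

    func (stdLogger) Logf(format string, args ...any) { log.Printf(format, args...) }

    func main() {
    	client := artifacts.NewArtifactAPIClient()
    	latest, err := client.GetLatestSnapshotVersion(context.Background(), stdLogger{})
    	if err != nil {
    		log.Fatalf("resolving latest snapshot: %v", err)
    	}
    	// prints something like "latest snapshot: 8.9.0-SNAPSHOT"
    	log.Printf("latest snapshot: %s", latest.VersionWithPrerelease())
    }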
diff --git a/dev-tools/mage/version/version_parser.go b/dev-tools/mage/version/version_parser.go
new file mode 100644
index 000000000000..2dc301d37ebc
--- /dev/null
+++ b/dev-tools/mage/version/version_parser.go
@@ -0,0 +1,189 @@
+// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+// or more contributor license agreements. Licensed under the Elastic License;
+// you may not use this file except in compliance with the Elastic License.
+
+package version
+
+import (
+	"errors"
+	"fmt"
+	"regexp"
+	"strconv"
+	"strings"
+)
+
+// regexp taken from https://semver.org/ (see the FAQ section/Is there a suggested regular expression (RegEx) to check a SemVer string?)
+const semVerFormat = `^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$`
+const preReleaseSeparator = "-"
+const metadataSeparator = "+"
+
+var semVerFmtRegEx *regexp.Regexp
+var namedGroups map[string]int
+
+func init() {
+	// small init to compile the regex and build a map of named groups and indexes
+	semVerFmtRegEx = regexp.MustCompile(semVerFormat)
+	groups := semVerFmtRegEx.SubexpNames()
+	namedGroups = make(map[string]int, len(groups))
+	for i, groupName := range groups {
+		namedGroups[groupName] = i
+	}
+}
+
+var ErrNoMatch = errors.New("version string does not match expected format")
+
+type ParsedSemVer struct {
+	original      string
+	major         int
+	minor         int
+	patch         int
+	prerelease    string
+	buildMetadata string
+}
+
+func (psv ParsedSemVer) Original() string {
+	return psv.original
+}
+
+func (psv ParsedSemVer) Major() int {
+	return psv.major
+}
+
+func (psv ParsedSemVer) Minor() int {
+	return psv.minor
+}
+
+func (psv ParsedSemVer) Patch() int {
+	return psv.patch
+}
+
+func (psv ParsedSemVer) CoreVersion() string {
+	return fmt.Sprintf("%d.%d.%d", psv.Major(), psv.Minor(), psv.Patch())
+}
+
+func (psv ParsedSemVer) Prerelease() string {
+	return psv.prerelease
+}
+
+func (psv ParsedSemVer) BuildMetadata() string {
+	return psv.buildMetadata
+}
+
+func (psv ParsedSemVer) VersionWithPrerelease() string {
+	b := new(strings.Builder)
+	b.WriteString(psv.CoreVersion())
+	if psv.prerelease != "" {
+		b.WriteString("-")
+		b.WriteString(psv.prerelease)
+	}
+	return b.String()
+}
+
+func (psv ParsedSemVer) IsSnapshot() bool {
+	return psv.prerelease == "SNAPSHOT" || strings.HasSuffix(psv.prerelease, "-SNAPSHOT")
+}
+
+func (psv ParsedSemVer) Less(other ParsedSemVer) bool {
+	// compare major version
+	if psv.major != other.major {
+		return psv.major < other.major
+	}
+
+	// same major, check minor
+	if psv.minor != other.minor {
+		return psv.minor < other.minor
+	}
+
+	// same minor, check patch
+	if psv.patch != other.patch {
+		return psv.patch < other.patch
+	}
+
+	// last resort: check if one is a prerelease and the other isn't
+	if psv.prerelease != "" && other.prerelease == "" {
+		return true
+	}
+
+	return false
+}
+
+func (psv ParsedSemVer) GetPreviousMinor() (*ParsedSemVer, error) {
+	major := psv.Major()
+	minor := psv.Minor()
+
+	if minor > 0 {
+		// We have at least one previous minor version in the current
+		// major version series. Set the patch to zero to guarantee the
+		// version exists, the number of patch releases varies.
+		return NewParsedSemVer(major, minor-1, 0, psv.Prerelease(), psv.BuildMetadata()), nil
+	}
+
+	// We are at the first minor of the current major version series.
To + // figure out the previous minor, we need to rely on knowledge of + // the release versions from the past major series'. + if major == 8 { + return NewParsedSemVer(7, 17, 10, psv.Prerelease(), psv.BuildMetadata()), nil + } + + return nil, fmt.Errorf("unable to determine previous minor version for [%s]", psv.String()) +} + +func (psv ParsedSemVer) String() string { + b := new(strings.Builder) + b.WriteString(psv.CoreVersion()) + if psv.Prerelease() != "" { + b.WriteString(preReleaseSeparator) + b.WriteString(psv.Prerelease()) + } + if psv.BuildMetadata() != "" { + b.WriteString(metadataSeparator) + b.WriteString(psv.buildMetadata) + } + return b.String() +} + +func NewParsedSemVer(major int, minor int, patch int, prerelease string, metadata string) *ParsedSemVer { + return &ParsedSemVer{ + major: major, + minor: minor, + patch: patch, + prerelease: prerelease, + buildMetadata: metadata, + } +} + +func ParseVersion(version string) (*ParsedSemVer, error) { + matches := semVerFmtRegEx.FindStringSubmatch(strings.TrimSpace(version)) + if matches == nil { + return nil, ErrNoMatch + } + + major, err := strconv.Atoi(matches[namedGroups["major"]]) + if err != nil { + return nil, fmt.Errorf("parsing major version: %w", err) + } + + minor, err := strconv.Atoi(matches[namedGroups["minor"]]) + if err != nil { + return nil, fmt.Errorf("parsing minor version: %w", err) + } + + patch, err := strconv.Atoi(matches[namedGroups["patch"]]) + if err != nil { + return nil, fmt.Errorf("parsing patch version: %w", err) + } + return &ParsedSemVer{ + original: version, + major: major, + minor: minor, + patch: patch, + prerelease: matches[namedGroups["prerelease"]], + buildMetadata: matches[namedGroups["buildmetadata"]], + }, nil +} + +type SortableParsedVersions []*ParsedSemVer + +func (spv SortableParsedVersions) Len() int { return len(spv) } +func (spv SortableParsedVersions) Swap(i, j int) { spv[i], spv[j] = spv[j], spv[i] } +func (spv SortableParsedVersions) Less(i, j int) bool { return spv[i].Less(*spv[j]) } diff --git a/dev-tools/mage/version/version_parser_test.go b/dev-tools/mage/version/version_parser_test.go new file mode 100644 index 000000000000..c7df78f5e63e --- /dev/null +++ b/dev-tools/mage/version/version_parser_test.go @@ -0,0 +1,413 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package version + +import ( + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestSemVerRegexpCompiles(t *testing.T) { + require.NotNil(t, semVerFmtRegEx) + assert.Contains(t, namedGroups, "major") + assert.Contains(t, namedGroups, "minor") + assert.Contains(t, namedGroups, "patch") + assert.Contains(t, namedGroups, "prerelease") + assert.Contains(t, namedGroups, "buildmetadata") +} + +func TestParseVersion(t *testing.T) { + type expected struct { + parsed *ParsedSemVer + versionPrerelease string + err error + } + + testcases := []struct { + name string + input string + expected expected + }{ + { + name: "Simple semver", + input: "1.2.3", + expected: expected{ + parsed: &ParsedSemVer{ + original: "1.2.3", + major: 1, + minor: 2, + patch: 3, + prerelease: "", + buildMetadata: "", + }, + versionPrerelease: "1.2.3", + err: nil, + }, + }, + { + name: "Biiig semver", + input: "1111.2222.3333", + expected: expected{ + parsed: &ParsedSemVer{ + original: "1111.2222.3333", + major: 1111, + minor: 2222, + patch: 3333, + prerelease: "", + buildMetadata: "", + }, + versionPrerelease: "1111.2222.3333", + err: nil, + }, + }, + { + name: "Simple semver with spaces around", + input: " \t1.2.3 \r\n ", + expected: expected{ + parsed: &ParsedSemVer{ + original: " \t1.2.3 \r\n ", + major: 1, + minor: 2, + patch: 3, + prerelease: "", + buildMetadata: "", + }, + versionPrerelease: "1.2.3", + err: nil, + }, + }, + { + name: "Semver with prerelease", + input: "1.2.3-SNAPSHOT", + expected: expected{ + parsed: &ParsedSemVer{ + original: "1.2.3-SNAPSHOT", + major: 1, + minor: 2, + patch: 3, + prerelease: "SNAPSHOT", + buildMetadata: "", + }, + versionPrerelease: "1.2.3-SNAPSHOT", + err: nil, + }, + }, + { + name: "Semver with versioned prerelease", + input: "1.2.3-er.1+abcdef", + expected: expected{ + parsed: &ParsedSemVer{ + original: "1.2.3-er.1+abcdef", + major: 1, + minor: 2, + patch: 3, + prerelease: "er.1", + buildMetadata: "abcdef", + }, + versionPrerelease: "1.2.3-er.1", + err: nil, + }, + }, + { + name: "Semver with prerelease and build metadata", + input: "1.2.3-SNAPSHOT+abcdef", + expected: expected{ + parsed: &ParsedSemVer{ + original: "1.2.3-SNAPSHOT+abcdef", + major: 1, + minor: 2, + patch: 3, + prerelease: "SNAPSHOT", + buildMetadata: "abcdef", + }, + versionPrerelease: "1.2.3-SNAPSHOT", + err: nil, + }, + }, + { + name: "Semver string version, with double prerelease(er and snapshot)", + input: "1.2.5-er.1-SNAPSHOT", + expected: expected{ + parsed: &ParsedSemVer{ + original: "1.2.5-er.1-SNAPSHOT", + major: 1, + minor: 2, + patch: 5, + prerelease: "er.1-SNAPSHOT", + buildMetadata: "", + }, + versionPrerelease: "1.2.5-er.1-SNAPSHOT", + }, + }, + { + name: "Error truncated semver", + input: "2.3", + expected: expected{ + parsed: nil, + err: ErrNoMatch, + }, + }, + { + name: "Error missing prerelease type", + input: "1.2.3-", + expected: expected{ + parsed: nil, + err: ErrNoMatch, + }, + }, + { + name: "Error missing build metadata", + input: "1.2.3-beta.22+", + expected: expected{ + parsed: nil, + err: ErrNoMatch, + }, + }, + { + name: "Weird random string version", + input: "asdasdasdasdasd", + expected: expected{ + parsed: nil, + err: ErrNoMatch, + }, + }, + { + name: "Almost semver string version, with double extra meta separator", + input: "1.2.3++", + expected: expected{ + parsed: nil, + err: ErrNoMatch, + }, + }, + { + name: "Almost semver string version, with empty minor version", + input: "1..2+ab", + expected: expected{ + 
parsed: nil, + err: ErrNoMatch, + }, + }, + { + name: "Almost semver string version, with patch containing non-digits", + input: "1.2.5ab0", + expected: expected{ + parsed: nil, + err: ErrNoMatch, + }, + }, + { + name: "Split string version", + input: "4.5\r\n.6", + expected: expected{ + parsed: nil, + err: ErrNoMatch, + }, + }, + } + + for _, tc := range testcases { + t.Run(tc.name, func(t *testing.T) { + actualParsed, err := ParseVersion(tc.input) + + if tc.expected.err != nil { + assert.ErrorIs(t, err, tc.expected.err) + return + } + assert.NoError(t, err) + assert.Equal(t, tc.expected.parsed, actualParsed) + + // assert getters + assert.Equal(t, tc.expected.parsed.original, actualParsed.Original()) + assert.Equal(t, tc.expected.parsed.major, actualParsed.Major()) + assert.Equal(t, tc.expected.parsed.minor, actualParsed.Minor()) + assert.Equal(t, tc.expected.parsed.patch, actualParsed.Patch()) + assert.Equal(t, tc.expected.parsed.prerelease, actualParsed.Prerelease()) + assert.Equal(t, tc.expected.parsed.buildMetadata, actualParsed.BuildMetadata()) + assert.Equal(t, tc.expected.versionPrerelease, actualParsed.VersionWithPrerelease()) + + // verify that String() method returns the same input string (after trimming) + assert.Equal(t, strings.TrimSpace(tc.input), actualParsed.String()) + }) + } +} + +func TestIsSnapshot(t *testing.T) { + testcases := []struct { + name string + input string + snapshot bool + }{ + { + name: "Simple snapshot", + input: "8.8.0-SNAPSHOT", + snapshot: true, + }, + { + name: "Snapshot with build meta", + input: "8.8.0-SNAPSHOT+abcdef", + snapshot: true, + }, + { + name: "Snapshot comparison is case sensitive", + input: "8.8.0-sNapShOt", + snapshot: false, + }, + { + name: "Only major minor patch", + input: "8.8.0", + snapshot: false, + }, + { + name: "Alpha prerelease is not snapshot", + input: "8.8.0-alpha", + snapshot: false, + }, + { + name: "Emergency release is not snapshot", + input: "8.8.0-er.1", + snapshot: false, + }, + { + name: "Emergency release snapshot is actually a snapshot", + input: "8.8.0-er.1-SNAPSHOT ", + snapshot: true, + }, + } + + for _, tc := range testcases { + t.Run(tc.name, func(t *testing.T) { + psv, err := ParseVersion(tc.input) + require.NoError(t, err) + require.NotNil(t, psv) + assert.Equal(t, tc.snapshot, psv.IsSnapshot()) + }) + + } + +} + +func TestLess(t *testing.T) { + testcases := []struct { + name string + leftVersion string + rightVersion string + less bool + }{ + { + name: "major version less than ours", + leftVersion: "7.17.10", + rightVersion: "8.9.0", + less: true, + }, + { + name: "minor version less than ours", + leftVersion: "8.6.2", + rightVersion: "8.9.0", + less: true, + }, + { + name: "patch version less than ours", + leftVersion: "8.7.0", + rightVersion: "8.7.1", + less: true, + }, + { + name: "prerelease is always less than non-prerelease", + leftVersion: "8.9.0-SNAPSHOT", + rightVersion: "8.9.0", + less: true, + }, + { + name: "2 prereleases have no specific order", + leftVersion: "8.9.0-SNAPSHOT", + rightVersion: "8.9.0-er1", + less: false, + }, + { + name: "2 prereleases have no specific order, reversed", + leftVersion: "8.9.0-er1", + rightVersion: "8.9.0-SNAPSHOT", + less: false, + }, + { + name: "build metadata have no influence on precedence", + leftVersion: "8.9.0-SNAPSHOT+aaaaaa", + rightVersion: "8.9.0-SNAPSHOT+bbbbbb", + less: false, + }, + { + name: "build metadata have no influence on precedence, reversed", + leftVersion: "8.9.0-SNAPSHOT+bbbbbb", + rightVersion: "8.9.0-SNAPSHOT+aaaaaa", + less: 
false, + }, + } + + for _, tc := range testcases { + t.Run(tc.name, func(t *testing.T) { + left, err := ParseVersion(tc.leftVersion) + require.NoError(t, err) + require.NotNil(t, left) + right, err := ParseVersion(tc.rightVersion) + require.NoError(t, err) + require.NotNil(t, right) + assert.Equal(t, left.Less(*right), tc.less) + }) + } +} + +func TestPreviousMinor(t *testing.T) { + testcases := []struct { + name string + version string + prevMinorVersion string + }{ + { + name: "basic release version", + version: "8.7.0", + prevMinorVersion: "8.6.0", + }, + { + name: "snapshot release version", + version: "8.9.3-SNAPSHOT", + prevMinorVersion: "8.8.0-SNAPSHOT", + }, + { + name: "emergency release version", + version: "8.9.0-er1", + prevMinorVersion: "8.8.0-er1", + }, + { + name: "previous major version", + version: "8.0.0", + prevMinorVersion: "7.17.10", + }, + { + name: "previous major snapshot", + version: "8.0.0-SNAPSHOT", + prevMinorVersion: "7.17.10-SNAPSHOT", + }, + { + name: "snapshot version with metadata", + version: "8.9.1-SNAPSHOT+aaaaaa", + prevMinorVersion: "8.8.0-SNAPSHOT+aaaaaa", + }, + } + + for _, tc := range testcases { + t.Run(tc.name, func(t *testing.T) { + parsed, err := ParseVersion(tc.version) + require.NoError(t, err) + require.NotNil(t, parsed) + + prev, err := parsed.GetPreviousMinor() + require.NoError(t, err) + require.Equal(t, tc.prevMinorVersion, prev.String()) + }) + } +} diff --git a/x-pack/elastic-agent/magefile.go b/x-pack/elastic-agent/magefile.go index 6ac7d0db4924..5cbb9d9d7bab 100644 --- a/x-pack/elastic-agent/magefile.go +++ b/x-pack/elastic-agent/magefile.go @@ -11,6 +11,7 @@ import ( "context" "errors" "fmt" + "log" "os" "os/exec" "path/filepath" @@ -23,8 +24,11 @@ import ( "github.com/magefile/mage/mg" "github.com/magefile/mage/sh" + "github.com/elastic/beats/v7/dev-tools/mage" devtools "github.com/elastic/beats/v7/dev-tools/mage" - "github.com/elastic/beats/v7/x-pack/elastic-agent/pkg/release" + "github.com/elastic/beats/v7/dev-tools/mage/manifest" + "github.com/elastic/beats/v7/dev-tools/mage/version" + bversion "github.com/elastic/beats/v7/libbeat/version" // mage:import "github.com/elastic/beats/v7/dev-tools/mage/target/common" @@ -53,6 +57,10 @@ var Aliases = map[string]interface{}{ "demo": Demo.Enroll, } +var errNoManifest = errors.New("missing ManifestURL environment variable") +var errNoAgentDropPath = errors.New("missing AGENT_DROP_PATH environment variable") +var errAtLeastOnePlatform = errors.New("elastic-agent package is expected to build at least one platform package") + func init() { common.RegisterCheckDeps(Update, Check.All) test.RegisterDeps(UnitTest) @@ -335,6 +343,52 @@ func Ironbank() error { return devtools.Ironbank() } +func FixDRADockerArtifacts() error { + return devtools.FixDRADockerArtifacts() +} + +// DownloadManifest downloads the provided manifest file into the predefined folder +func DownloadManifest() error { + fmt.Println("--- Downloading manifest") + start := time.Now() + defer func() { fmt.Println("Downloading manifest took", time.Since(start)) }() + + dropPath, found := os.LookupEnv(agentDropPath) + + if !found { + return errNoAgentDropPath + } + + if !devtools.PackagingFromManifest { + return errNoManifest + } + + platforms := devtools.Platforms.Names() + if len(platforms) == 0 { + return errAtLeastOnePlatform + } + + platformPackages := map[string]string{ + "darwin/amd64": "darwin-x86_64.tar.gz", + "darwin/arm64": "darwin-aarch64.tar.gz", + "linux/amd64": "linux-x86_64.tar.gz", + "linux/arm64": 
"linux-arm64.tar.gz", + "windows/amd64": "windows-x86_64.zip", + } + + var requiredPackages []string + for _, p := range platforms { + requiredPackages = append(requiredPackages, platformPackages[p]) + } + + if e := manifest.DownloadComponentsFromManifest(devtools.ManifestURL, platforms, platformPackages, dropPath); e != nil { + return fmt.Errorf("failed to download the manifest file, %w", e) + } + log.Printf(">> Completed downloading packages from manifest into drop-in %s", dropPath) + + return nil +} + func requiredPackagesPresent(basePath, beat, version string, requiredPackages []string) bool { for _, pkg := range requiredPackages { if _, ok := os.LookupEnv(snapshotEnv); ok { @@ -348,6 +402,7 @@ func requiredPackagesPresent(basePath, beat, version string, requiredPackages [] return false } } + fmt.Printf("All packages for %s are present\n", beat) return true } @@ -392,6 +447,7 @@ func Update() { // CrossBuild cross-builds the beat for all target platforms. func CrossBuild() error { + fmt.Printf(">> Crossbuild") return devtools.CrossBuild() } @@ -566,10 +622,34 @@ func runAgent(env map[string]string) error { } func packageAgent(requiredPackages []string, packagingFn func()) { - version, found := os.LookupEnv("BEAT_VERSION") - if !found { - version = release.Version() + fmt.Println("--- Package Elastic-Agent") + var packageVersion string + // if we have defined a manifest URL to package Agent from, we should be using the same packageVersion of that manifest + if devtools.PackagingFromManifest { + fmt.Println(">>>> Using manifest to package Agent") + if manifestResponse, err := manifest.DownloadManifest(devtools.ManifestURL); err != nil { + log.Panicf("failed to download remote manifest file %s", err) + } else { + if parsedVersion, err := version.ParseVersion(manifestResponse.Version); err != nil { + log.Panicf("the manifest version from manifest is not semver, got %s", manifestResponse.Version) + } else { + // When getting the packageVersion from snapshot we should also update the env of SNAPSHOT=true which is + // something that we use as an implicit parameter to various functions + if parsedVersion.IsSnapshot() { + os.Setenv(snapshotEnv, "true") + mage.Snapshot = true + } + os.Setenv("BEAT_VERSION", parsedVersion.CoreVersion()) + } + } } + if beatVersion, found := os.LookupEnv("BEAT_VERSION"); !found { + packageVersion = bversion.GetDefaultVersion() + } else { + packageVersion = beatVersion + } + + fmt.Printf(">>> BEAT_VERSION: %s\n", packageVersion) // build deps only when drop is not provided if dropPathEnv, found := os.LookupEnv(agentDropPath); !found || len(dropPathEnv) == 0 { @@ -608,7 +688,7 @@ func packageAgent(requiredPackages []string, packagingFn func()) { panic(err) } - if !requiredPackagesPresent(pwd, b, version, requiredPackages) { + if !requiredPackagesPresent(pwd, b, packageVersion, requiredPackages) { cmd := exec.Command("mage", "package") cmd.Dir = pwd cmd.Stdout = os.Stdout @@ -617,7 +697,8 @@ func packageAgent(requiredPackages []string, packagingFn func()) { if envVar := selectedPackageTypes(); envVar != "" { cmd.Env = append(cmd.Env, envVar) } - + fmt.Println(">>> Running mage package for %s\n", b) + fmt.Println(cmd.String()) if err := cmd.Run(); err != nil { panic(err) }