diff --git a/.github/workflows/post-release-workflow.yml b/.github/workflows/post-release-workflow.yml
index 0eb7d406..ac156d1e 100644
--- a/.github/workflows/post-release-workflow.yml
+++ b/.github/workflows/post-release-workflow.yml
@@ -1,112 +1,28 @@
-name: Post Release Workflow
+name: Post Release Tasks
 on:
-  workflow_dispatch: # Enables manual trigger
+  workflow_dispatch:
+    inputs:
+      github_ref_name:
+        description: "Current release tag in this train (e.g., v1.15.3)"
+        required: true
+      previous_ref_name:
+        description: "Previous release tag in this train (e.g., v1.14.8)"
+        required: false
 jobs:
-  generate-release-notes:
-    name: Generate Release Notes
+  post_release:
     runs-on: ubuntu-latest
     steps:
-      - name: Check out the repository
-        uses: actions/checkout@v3
-
-      - name: Download Changelog Generator
-        run: |
-          curl -L -o github-changelog-generator.jar https://github.com/spring-io/github-changelog-generator/releases/download/v0.0.11/github-changelog-generator.jar
-
-      - name: Generate release notes
-        id: generate_notes
-        run: |
-          java -jar github-changelog-generator.jar \
-            ${GITHUB_REF_NAME#v} \
-            changelog.md \
-            --changelog.repository="${{ github.repository }}" \
-            --github.token="${{ secrets.GITHUB_TOKEN }}"
-
-      - name: Run script to process Markdown file
-        run: python .github/workflows/process_changelog.py
-
-      - name: Update release text
-        run: |
-          echo -e "::Info::Original changelog\n\n"
-          cat changelog.md
-
-          echo -e "\n\n"
-          echo -e "::Info::Processed changelog\n\n"
-          cat changelog-output.md
-          gh release edit ${{ github.ref_name }} --notes-file changelog-output.md
-        env:
-          GH_TOKEN: ${{ secrets.GH_ACTIONS_REPO_TOKEN }}
-
-  close-milestone:
-    name: Close Milestone
-    runs-on: ubuntu-latest
-    needs: generate-release-notes
-    steps:
-      - name: Close milestone
-        run: |
-          # Extract version without 'v' prefix
-          milestone_name=${GITHUB_REF_NAME#v}
-
-          echo "Closing milestone: $milestone_name"
-
-          # List milestones and find the ID
-          milestone_id=$(gh api "/repos/${{ github.repository }}/milestones?state=open" \
-            --jq ".[] | select(.title == \"$milestone_name\").number")
-
-          if [ -z "$milestone_id" ]; then
-            echo "::error::Milestone '$milestone_name' not found"
-            exit 1
-          fi
-
-          # Close the milestone
-          gh api --method PATCH "/repos/${{ github.repository }}/milestones/$milestone_id" \
-            -f state=closed
-
-          echo "Successfully closed milestone: $milestone_name"
-        env:
-          GH_TOKEN: ${{ secrets.GH_ACTIONS_REPO_TOKEN }}
-
-  notify:
-    name: Send Notifications
-    runs-on: ubuntu-latest
-    needs: close-milestone
-
-    steps:
-      - name: Announce Release on `Spring-Releases` space
-        run: |
-          milestone_name=${GITHUB_REF_NAME#v}
-          curl --location --request POST '${{ secrets.SPRING_RELEASE_GCHAT_WEBHOOK_URL }}' \
-            --header 'Content-Type: application/json' \
-            --data-raw "{ text: \"${{ github.event.repository.name }}-announcing ${milestone_name}\"}"
-
-      - name: Post on Bluesky
-        env:
-          BSKY_IDENTIFIER: ${{ secrets.BLUESKY_HANDLE }}
-          BSKY_PASSWORD: ${{ secrets.BLUESKY_PASSWORD }}
-        run: |
-          # First get the session token
-          SESSION_TOKEN=$(curl -s -X POST https://bsky.social/xrpc/com.atproto.server.createSession \
-            -H "Content-Type: application/json" \
-            -d "{\"identifier\":\"$BSKY_IDENTIFIER\",\"password\":\"$BSKY_PASSWORD\"}" | \
-            jq -r .accessJwt)
-
-          # Create post content
-          VERSION=${GITHUB_REF_NAME#v}
-          POST_TEXT="${{ github.event.repository.name }} ${VERSION} has been released!\n\nCheck out the changelog: https://github.com/${GITHUB_REPOSITORY}/releases/tag/${GITHUB_REF_NAME}"
-
-          # Create the post
-          curl -X POST https://bsky.social/xrpc/com.atproto.repo.createRecord \
-            -H "Content-Type: application/json" \
-            -H "Authorization: Bearer ${SESSION_TOKEN}" \
-            -d "{
-              \"repo\": \"$BSKY_IDENTIFIER\",
-              \"collection\": \"app.bsky.feed.post\",
-              \"record\": {
-                \"\$type\": \"app.bsky.feed.post\",
-                \"text\": \"$POST_TEXT\",
-                \"createdAt\": \"$(date -u +"%Y-%m-%dT%H:%M:%S.000Z")\"
-              }
-            }"
+      - uses: actions/checkout@v4
+
+      - name: Run Post Release Tasks
+        uses: marcingrzejszczak/micrometer-release@v0.0.1
+        with:
+          gh_token: ${{ secrets.GH_ACTIONS_REPO_TOKEN }}
+          github_ref_name: ${{ github.event.inputs.github_ref_name }}
+          previous_ref_name: ${{ github.event.inputs.previous_ref_name }}
+          spring_release_gchat_webhook_url: ${{ secrets.SPRING_RELEASE_GCHAT_WEBHOOK_URL }}
+          bluesky_handle: ${{ secrets.BLUESKY_HANDLE }}
+          bluesky_password: ${{ secrets.BLUESKY_PASSWORD }}
diff --git a/.github/workflows/process_changelog.py b/.github/workflows/process_changelog.py
deleted file mode 100644
index 58461840..00000000
--- a/.github/workflows/process_changelog.py
+++ /dev/null
@@ -1,148 +0,0 @@
-import re
-import subprocess
-
-input_file = "changelog.md"
-output_file = "changelog-output.md"
-
-def fetch_test_and_optional_dependencies():
-    # Fetch the list of all subprojects
-    result = subprocess.run(
-        ["./gradlew", "projects"],
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
-        text=True,
-    )
-    subprojects = []
-    for line in result.stdout.splitlines():
-        match = re.match(r".*Project (':.+')", line)
-        if match:
-            subprojects.append(match.group(1).strip("'"))
-
-    print(f"Found the following subprojects\n\n {subprojects}\n\n")
-    test_optional_dependencies = set()
-    implementation_dependencies = set()
-
-    print("Will fetch non transitive dependencies for all subprojects...")
-    # Run dependencies task for all subprojects in a single Gradle command
-    if subprojects:
-        dependencies_command = ["./gradlew"] + [f"{subproject}:dependencies" for subproject in subprojects]
-        result = subprocess.run(
-            dependencies_command,
-            stdout=subprocess.PIPE,
-            stderr=subprocess.PIPE,
-            text=True,
-        )
-        in_test_section = False
-        in_optional_section = False
-        in_implementation_section = False
-
-        for line in result.stdout.splitlines():
-            if "project :" in line:
-                continue
-
-            # Detect gradle plugin
-            if "classpath" in line:
-                in_optional_section = True
-                continue
-
-            # Detect test dependencies section
-            if "testCompileClasspath" in line or "testImplementation" in line:
-                in_test_section = True
-                continue
-            if "runtimeClasspath" in line or line.strip() == "":
-                in_test_section = False
-
-            # Detect optional dependencies section
-            if "compileOnly" in line:
-                in_optional_section = True
-                continue
-            if line.strip() == "":
-                in_optional_section = False
-
-            # Detect implementation dependencies section
-            if "implementation" in line or "compileClasspath" in line:
-                in_implementation_section = True
-                continue
-            if line.strip() == "":
-                in_implementation_section = False
-
-            # Parse dependencies explicitly declared with +--- or \---
-            match = re.match(r"[\\+|\\\\]--- ([^:]+):([^:]+):([^ ]+)", line)
-            if match:
-                group_id, artifact_id, _ = match.groups()
-                dependency_key = f"{group_id}:{artifact_id}"
-                if in_test_section or in_optional_section:
-                    test_optional_dependencies.add(dependency_key)
-                if in_implementation_section:
-                    implementation_dependencies.add(dependency_key)
-
-    # Remove dependencies from test/optional if they are also in implementation
-    final_exclusions = test_optional_dependencies - implementation_dependencies
-
-    print(f"Dependencies in either test or optional scope to be excluded from changelog processing:\n\n{final_exclusions}\n\n")
-    return final_exclusions
-
-def process_dependency_upgrades(lines, exclude_dependencies):
-    dependencies = {}
-    regex = re.compile(r"- Bump (.+?) from ([\d\.]+) to ([\d\.]+) \[(#[\d]+)\]\((.+)\)")
-    for line in lines:
-        match = regex.match(line)
-        if match:
-            unit, old_version, new_version, pr_number, link = match.groups()
-            if unit not in exclude_dependencies:
-                if unit not in dependencies:
-                    dependencies[unit] = {"lowest": old_version, "highest": new_version, "pr_number": pr_number, "link": link}
-                else:
-                    dependencies[unit]["lowest"] = min(dependencies[unit]["lowest"], old_version)
-                    dependencies[unit]["highest"] = max(dependencies[unit]["highest"], new_version)
-    sorted_units = sorted(dependencies.keys())
-    return [f"- Bump {unit} from {dependencies[unit]['lowest']} to {dependencies[unit]['highest']} [{dependencies[unit]['pr_number']}]({dependencies[unit]['link']})" for unit in sorted_units]
-
-with open(input_file, "r") as file:
-    lines = file.readlines()
-
-# Fetch test and optional dependencies from all projects
-print("Fetching test and optional dependencies from the project and its subprojects...")
-exclude_dependencies = fetch_test_and_optional_dependencies()
-
-# Step 1: Copy all content until the hammer line
-header = []
-dependency_lines = []
-footer = []
-in_dependency_section = False
-
-print("Parsing changelog until the dependency upgrades section...")
-
-for line in lines:
-    if line.startswith("## :hammer: Dependency Upgrades"):
-        in_dependency_section = True
-        header.append(line)
-        header.append("\n")
-        break
-    header.append(line)
-
-print("Parsing dependency upgrade section...")
-
-# Step 2: Parse dependency upgrades
-if in_dependency_section:
-    for line in lines[len(header):]:
-        if line.startswith("## :heart: Contributors"):
-            break
-        dependency_lines.append(line)
-
-print("Parsing changelog to find everything after the dependency upgrade section...")
-# Find the footer starting from the heart line
-footer_start_index = next((i for i, line in enumerate(lines) if line.startswith("## :heart: Contributors")), None)
-if footer_start_index is not None:
-    footer = lines[footer_start_index:]
-
-print("Processing the dependency upgrades section...")
-processed_dependencies = process_dependency_upgrades(dependency_lines, exclude_dependencies)
-
-print("Writing output...")
-# Step 3: Write the output file
-with open(output_file, "w") as file:
-    file.writelines(header)
-    file.writelines(f"{line}\n" for line in processed_dependencies)
-    file.writelines("\n")
-    file.writelines(footer)
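
For reference, a minimal dispatch sketch (an illustration, not part of the patch; it assumes an authenticated GitHub CLI and that the workflow file keeps its current name). The new inputs can be supplied when triggering the workflow manually, with tag values matching the placeholders in the input descriptions above:

    # hypothetical example values for the two workflow_dispatch inputs
    gh workflow run post-release-workflow.yml \
      -f github_ref_name=v1.15.3 \
      -f previous_ref_name=v1.14.8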