Merge pull request autoatml#255 from naik-aakash/combine_workflows
Fix recursive autoupdate durations
naik-aakash authored Nov 15, 2024
2 parents 5f54d10 + 352c6b1 commit 40a586b
Showing 5 changed files with 107 additions and 30 deletions.
103 changes: 82 additions & 21 deletions .github/scripts/average_test_durations.py
@@ -1,30 +1,91 @@
# .github/scripts/average_test_durations.py
# Description: This script averages the test durations from all the artifacts
import glob
import json
+import subprocess
from collections import defaultdict
import os

+# Function to collect test names using pytest
+def collect_tests_with_pytest():
+    # Run pytest with --collect-only to get the list of test cases
+    result = subprocess.run(
+        ["pytest", "--collect-only"],
+        capture_output=True,
+        text=True
+    )
+
+    collected_tests = defaultdict(list)
+
+    # Parse the output and record each collected test with a placeholder duration of 0
+    for line in result.stdout.splitlines():
+        if line.startswith("tests/"):
+            collected_tests[line] = 0
+
+    return collected_tests


+# Consolidate durations from existing artifacts
+def consolidate_durations():
+    durations = defaultdict(lambda: {'total_duration': 0, 'count': 0})
+
+    # Iterate over all downloaded duration artifacts
+    for folder in glob.glob("test-durations-*"):
+        # The path to the duration file in each directory
+        duration_file_path = os.path.join(folder, ".pytest-split-durations")
+
+        if os.path.isfile(duration_file_path):
+            with open(duration_file_path, "r") as f:
+                data = json.load(f)
+                for test, duration in data.items():
+                    durations[test]['total_duration'] += duration
+                    durations[test]['count'] += 1
+
+    # Calculate the average duration for each test
+    return {test: info['total_duration'] / info['count'] for test, info in durations.items()}

# Define the path to the consolidated durations file
-consolidated_file = "tests/test_data/.pytest-split-durations"
+CONSOLIDATED_FILE = "tests/test_data/.pytest-split-durations"

-# Dictionary to store total duration and count for each test
-durations = defaultdict(lambda: {'total_duration': 0, 'count': 0})
-
-# Iterate over all downloaded duration artifacts
-for folder in glob.glob("test-durations-*"):
-    # The path to the duration file in each directory
-    duration_file_path = os.path.join(folder, ".pytest-split-durations")
-
-    if os.path.isfile(duration_file_path):
-        with open(duration_file_path, "r") as f:
-            data = json.load(f)
-            for test, duration in data.items():
-                durations[test]['total_duration'] += duration
-                durations[test]['count'] += 1
-
-# Calculate the average duration for each test
-averaged_durations = {test: info['total_duration'] / info['count'] for test, info in durations.items()}
-
-# Write the averaged durations to the consolidated file
-with open(consolidated_file, "w") as f:
-    json.dump(averaged_durations, f, indent=4)
+# Main script logic
+def main():
+    # Collect the list of tests via pytest
+    collected_tests = collect_tests_with_pytest()
+
+    # Consolidate durations from artifacts
+    consolidated_durations = consolidate_durations()
+
+    # Merge and update with consolidated durations
+    updated_durations = {}
+    for test, duration in collected_tests.items():
+        if test in consolidated_durations:
+            # Use the averaged duration if the test exists in consolidated data
+            updated_durations[test] = consolidated_durations[test]
+        else:
+            # Add new test with its collected placeholder duration
+            updated_durations[test] = duration
+
+    # Load the existing durations file if it exists
+    existing_durations = {}
+    if os.path.isfile(CONSOLIDATED_FILE):
+        with open(CONSOLIDATED_FILE, "r") as f:
+            existing_durations = json.load(f)
+
+    # Sort the keys to compare the tests in both dictionaries
+    updated_durations_key = sorted(updated_durations.keys())
+    existing_durations_key = sorted(existing_durations.keys())
+
+    # Check whether the set of collected tests has changed
+    if updated_durations_key == existing_durations_key:
+        print("No new tests detected; durations file remains unchanged.")
+    else:
+        # Write the updated durations to the consolidated file
+        with open(CONSOLIDATED_FILE, "w") as f:
+            json.dump(updated_durations, f, indent=4)
+        print("New tests detected; updated the durations file.")

+if __name__ == "__main__":
+    main()
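
For readers skimming the diff, here is a minimal sketch of the merge-and-guard behaviour the updated script implements, which appears to be what fixes the recursive auto-update: the durations file is only rewritten when the set of collected tests changes. The test IDs and numbers below are hypothetical, not taken from the repository.

# Sketch only: hypothetical inputs illustrating the merge-and-guard logic above.
consolidated = {"tests/test_example.py::test_run": 1.5}   # averaged from artifacts
collected = {
    "tests/test_example.py::test_run": 0,                 # already-known test
    "tests/test_example.py::test_new_feature": 0,         # newly added test
}
existing = {"tests/test_example.py::test_run": 1.5}       # current durations file

# Prefer the averaged duration where one exists; new tests keep the placeholder 0.
updated = {test: consolidated.get(test, default) for test, default in collected.items()}

# Rewrite the file only when the set of tests has changed.
if sorted(updated) == sorted(existing):
    print("No new tests detected; durations file remains unchanged.")
else:
    print("New tests detected; updated the durations file.")   # this branch runs in this example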
4 changes: 2 additions & 2 deletions .github/workflows/docker-publish.yml
@@ -20,7 +20,7 @@ jobs:

    steps:
      - name: Checkout code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
@@ -53,7 +53,7 @@ jobs:
          docker push ghcr.io/${{ env.IMAGE_NAME }}/autoplex-python-${{ matrix.python-version }}:${{ env.VERSION }}
  update-devcontainer:
-    if: github.repository_owner == 'autoatml' && github.ref == 'refs/heads/main'
+    if: github.event_name == 'release' && github.event.action == 'created'
    needs: build-image
    runs-on: ubuntu-latest
    steps:
2 changes: 1 addition & 1 deletion .github/workflows/pylint.yml
@@ -12,7 +12,7 @@ jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

      - uses: actions/setup-python@v4
        with:
26 changes: 21 additions & 5 deletions .github/workflows/python-package.yml
@@ -33,7 +33,7 @@ jobs:
            | jq -r 'sort_by(.created_at) | reverse | .[0].metadata.container.tags[0]')
          echo "VERSION=$TAG" >> $GITHUB_ENV
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Run tests using Docker image for Python ${{ matrix.python-version }}
        run: |
          docker pull ghcr.io/autoatml/autoplex/autoplex-python-${{ matrix.python-version }}:${{ env.VERSION }}
@@ -68,17 +68,33 @@ jobs:
    if: github.repository_owner == 'autoatml' && github.ref == 'refs/heads/main'
    needs: build
    runs-on: ubuntu-latest
+    defaults:
+      run:
+        shell: bash -l {0} # enables conda/mamba env activation by reading bash profile

    steps:
-      - uses: actions/checkout@v3

+      - name: Check out repo
+        uses: actions/checkout@v4
+      - name: Set up micromamba
+        uses: mamba-org/setup-micromamba@main
+      - name: Create mamba environment
+        run: |
+          micromamba create -n autoplex_tests python=3.10 --yes
+      - name: Install uv
+        run: micromamba run -n autoplex_tests pip install uv
+      - name: Install autoplex and dependencies
+        run: |
+          micromamba activate autoplex_tests
+          uv pip install --upgrade pip
+          uv pip install --prerelease=allow .[tests,strict]
      - name: Download test duration artifacts
        uses: actions/download-artifact@v3

      - name: Compute average of test durations
        run: |
          # Clear out any existing content in the consolidated file
          > tests/test_data/.pytest-split-durations
          # Run the Python script to average out test durations
+          micromamba activate autoplex_tests
          python3 .github/scripts/average_test_durations.py
          rm -rf test-durations-*
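
The download step above leaves one test-durations-* folder per artifact in the workspace, each containing a .pytest-split-durations JSON file, which is what the averaging script globs for. A rough sketch of that layout, with folder names and timings assumed for illustration rather than taken from a real workflow run:

# Sketch only: recreate the expected artifact layout locally (hypothetical values).
import json
import os

shard_durations = [
    {"tests/test_example.py::test_run": 1.2},   # e.g. from artifact test-durations-1
    {"tests/test_example.py::test_run": 1.8},   # e.g. from artifact test-durations-2
]
for shard, data in enumerate(shard_durations, start=1):
    folder = f"test-durations-{shard}"
    os.makedirs(folder, exist_ok=True)
    with open(os.path.join(folder, ".pytest-split-durations"), "w") as f:
        json.dump(data, f)

# With this layout in place, consolidate_durations() in
# .github/scripts/average_test_durations.py would average the two entries
# to 1.5 seconds for that test.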
2 changes: 1 addition & 1 deletion .github/workflows/python-publish.yml
@@ -16,7 +16,7 @@ jobs:
    runs-on: ubuntu-latest

    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v4
        with: