diff --git a/tf_compare.py b/tf_compare.py
index 9cd56a5..b545598 100644
--- a/tf_compare.py
+++ b/tf_compare.py
@@ -55,72 +55,85 @@ def get_workflow_runs():
     headers['Authorization'] = f'Bearer {github_token}'
     headers['Accept'] = 'application/vnd.github.v3+json'
-    response = requests.get(f"https://api.github.com/repos/{repos[1]}/actions/workflows/{workflow}/runs?per_page=2", headers=headers, stream=True)
+    response = requests.get(f"https://api.github.com/repos/{repos[1]}/actions/workflows/{workflow}/runs?per_page=10", headers=headers, stream=True)
     if response.status_code == 200:
         # Parse the JSON response
         artifact_data = response.json()
-        workflow_runs = artifact_data.get("workflow_runs", [])
+        # Filter the runs for a 'success' conclusion or an 'in_progress' status
+        valid_runs = [run for run in artifact_data['workflow_runs'] if run['conclusion'] == 'success' or run['status'] == 'in_progress']
 
-        branch = workflow_runs[0]['head_branch']
+        # Check that there is at least one run that succeeded
+        if any(run['conclusion'] == 'success' for run in valid_runs):
 
-        # download workflow run log for new run that is currently in progress
-        allowed_workflows = {'Beta', 'Monthly', 'Red'}
-        if workflow_runs[0]['display_title'] in allowed_workflows and workflow_runs[1]['display_title'] in allowed_workflows and branch == 'develop':
-
-            for index, run in enumerate(workflow_runs, start=1):
-                # add workflow run id to array
-                _artifactRunID.append(run['id'])
+            # Sort by the 'created_at' field in descending order to get the latest first
+            sorted_runs = sorted(valid_runs, key=lambda x: datetime.fromisoformat(x['created_at'].replace('Z', '+00:00')), reverse=True)
 
-                if index == 1:
-                    url = f"https://api.github.com/repos/{repos[1]}/actions/runs/{run['id']}/logs"
+            workflow_runs = []
 
-                    # Download logs
-                    print(f"Downloading Log files for run: {run['id']}")
-                    response = requests.get(url, headers=headers)
-                    time.sleep(1)
-                    if response.status_code == 200:
-                        print(f"Log files downloaded for run: {run['id']}")
-                        with open("logs.zip", "wb") as f:
-                            f.write(response.content)
-                        testing = unzip_log_files()
-
-                        with open(testing[0], "r") as file:
-                            content = file.read()
-                            match = re.search(r"Runtime: Version\s*(.*)", content)
-                            #return match.group(1) if match else None # Returns only the version part
-                            global RTVersion
-                            RTVersion = match.group(1)
-                            print(f"Log files processed and Runtime version extracted: RT v{RTVersion}")
-
-                        # remove log files
-                        # Get all files in the directory
-                        files = glob.glob(os.path.join(baseSaveLocation, "logs.zip"))
-
-                        for file in files:
-                            if os.path.isfile(file): # Ensure it's a file (not a folder)
+            # Add the latest 2 runs to an array
+            for run in sorted_runs[:2]:
+                workflow_runs.append(run)
+
+            branch = workflow_runs[0]['head_branch']
+
+            # download workflow run log for new run that is currently in progress
+            allowed_workflows = {'Beta', 'Monthly', 'Red'}
+            if workflow_runs[0]['display_title'] in allowed_workflows and workflow_runs[1]['display_title'] in allowed_workflows and branch == 'develop':
+
+                for index, run in enumerate(workflow_runs, start=1):
+                    # add workflow run id to array
+                    _artifactRunID.append(run['id'])
+
+                    if index == 1:
+                        url = f"https://api.github.com/repos/{repos[1]}/actions/runs/{run['id']}/logs"
+
+                        # Download logs
+                        print(f"Downloading Log files for run: {run['id']}")
+                        response = requests.get(url, headers=headers)
+                        time.sleep(1)
+                        if response.status_code == 200:
+                            print(f"Log files downloaded for run: {run['id']}")
+                            with open("logs.zip", "wb") as f:
+                                f.write(response.content)
+                            testing = unzip_log_files()
+
+                            with open(testing[0], "r") as file:
+                                content = file.read()
+                                match = re.search(r"Runtime: Version\s*(.*)", content)
+                                #return match.group(1) if match else None # Returns only the version part
+                                global RTVersion
+                                RTVersion = match.group(1)
+                                print(f"Log files processed and Runtime version extracted: RT v{RTVersion}")
+
+                            # remove log files
+                            # Get all files in the directory
+                            files = glob.glob(os.path.join(baseSaveLocation, "logs.zip"))
+
+                            for file in files:
+                                if os.path.isfile(file): # Ensure it's a file (not a folder)
+                                    try:
+                                        os.remove(file)
+                                        print("Log files deleted.\n")
+                                    except Exception as e:
+                                        print(f"Error deleting Log Files {file}: {e}")
+
+                            # Define the folder to be deleted
+                            folder_to_delete = os.path.join(baseSaveLocation, "CI")
+                            if os.path.exists(folder_to_delete) and os.path.isdir(folder_to_delete):
                                 try:
-                                    os.remove(file)
-                                    print("Log files deleted.\n")
+                                    shutil.rmtree(folder_to_delete)
                                 except Exception as e:
-                                    print(f"Error deleting Log Files {file}: {e}")
-
-                        # Define the folder to be deleted
-                        folder_to_delete = os.path.join(baseSaveLocation, "CI")
-                        if os.path.exists(folder_to_delete) and os.path.isdir(folder_to_delete):
-                            try:
-                                shutil.rmtree(folder_to_delete)
-                            except Exception as e:
-                                print(f"Error deleting folder {folder_to_delete}: {e}")
-                    else:
-                        print("Failed to fetch logs:", response.text)
-            get_artifact_URL()
-        else:
-            print("Valid workflow not used, only Beta, Monthly or Red on the develop branch is accepted for the TF Compare script")
+                                    print(f"Error deleting folder {folder_to_delete}: {e}")
+                        else:
+                            print("Failed to fetch logs:", response.text)
+                get_artifact_URL()
+            else:
+                print("Valid workflow not used, only Beta, Monthly or Red on the develop branch is accepted for the TF Compare script")
+        else:
+            print("No successful previous runs available in the last 10 runs fetched")
     else:
         print(f"Failed to download artifact. HTTP Status: {response.status_code}")
-        sys.exit(response.text) # Print error details
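
Note on the hunk above: the new sort key relies on datetime.fromisoformat, so tf_compare.py needs "from datetime import datetime" in scope; that import sits outside this hunk and is not visible here. The snippet below is a minimal, standalone sketch of the run-selection logic the change introduces (keep successful or in-progress runs, require at least one success, sort newest first by 'created_at', take the latest two). The helper name select_latest_runs and the sample payload are illustrative only and are not part of tf_compare.py.

from datetime import datetime

def select_latest_runs(workflow_runs, count=2):
    # Keep runs that finished successfully or are still in progress
    # (in-progress runs report conclusion=None and status='in_progress').
    valid_runs = [
        run for run in workflow_runs
        if run['conclusion'] == 'success' or run['status'] == 'in_progress'
    ]

    # Require at least one successful run to compare against.
    if not any(run['conclusion'] == 'success' for run in valid_runs):
        return []  # caller reports "No successful previous runs ..."

    # Newest first by 'created_at'; the replace() keeps fromisoformat working
    # on Python versions older than 3.11, which reject a trailing 'Z'.
    sorted_runs = sorted(
        valid_runs,
        key=lambda run: datetime.fromisoformat(run['created_at'].replace('Z', '+00:00')),
        reverse=True,
    )
    return sorted_runs[:count]

# Hypothetical payload mimicking artifact_data['workflow_runs'] from the API.
sample_runs = [
    {'id': 3, 'status': 'in_progress', 'conclusion': None,      'created_at': '2024-05-03T10:00:00Z'},
    {'id': 2, 'status': 'completed',   'conclusion': 'failure', 'created_at': '2024-05-02T10:00:00Z'},
    {'id': 1, 'status': 'completed',   'conclusion': 'success', 'created_at': '2024-05-01T10:00:00Z'},
]

print([run['id'] for run in select_latest_runs(sample_runs)])  # -> [3, 1]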