diff --git a/scripts/accept_layer_merge_request.py b/scripts/accept_layer_merge_request.py
index ecc0453b9c6b792df645078a30396fac44fc944b..b8a1dcbd302e48b99bee67fa1c5720b14715b859 100755
--- a/scripts/accept_layer_merge_request.py
+++ b/scripts/accept_layer_merge_request.py
@@ -19,6 +19,42 @@ critical_error = (
     " 2. examine why this has happened and fix it in the CI pipeline"
 )
 
+# Retrigger the integration pipelines of the open MRs in the BSP projects
+def retrigger_bsp_pipelines(
+    gl, group_name, ci_project
+):
+    if "nxp" in ci_project:
+        bsp_group = group_name + "/nxp"
+    elif "rockchip" in ci_project:
+        bsp_group = group_name + "/rockchip"
+    else:
+        print("No valid BSP vendor found, retriggering all BSP projects")
+        bsp_group = group_name
+
+    print(f"Using BSP group: {bsp_group}")
+
+    # Find the group
+    group = gl.groups.get(bsp_group)
+
+    # Get all projects within the BSP group
+    projects = group.projects.list(include_subgroups=True, all=True)
+
+    for project in projects:
+        print(f"Checking project: {project.name}")
+        repo = gl.projects.get(project.id)
+
+        # Get all open merge requests
+        open_mrs = repo.mergerequests.list(state='opened', all=True)
+
+        # Retrigger pipeline execution for all open MRs
+        for mr in open_mrs:
+            print(f"Running pipeline for MR: {mr.title}")
+            pipeline = mr.pipelines.create()
+            if pipeline:
+                print(f"Pipeline created with ID: {pipeline.id}")
+            else:
+                print("Pipeline creation failed.")
+                sys.exit(1)
 
 def get_source_integration_requests(
     project, state=None, target_branch=None, commit=None
@@ -188,6 +224,12 @@
         dest="recipe_name",
         required=True,
     )
+    parser.add_argument(
+        "--bsp-group",
+        help="""group containing all BSP projects""",
+        dest="bsp_group",
+        required=True,
+    )
     parser.add_argument(
         "--rebase",
         help="""attempt to automatically rebase merge request if necessary""",
@@ -237,6 +279,8 @@
     else:
         sys.exit(1)
 
+    # Retrigger all necessary pipelines in the BSP group
+    retrigger_bsp_pipelines(gitlab, args.bsp_group, args.project)
 
 if __name__ == "__main__":
     main()
diff --git a/scripts/retrigger_pipeline_jobs.py b/scripts/retrigger_pipeline_jobs.py
index 11b5258d9e91ef5d758984392a780b7310358c0d..8c0b9e110601e7be5516ece3ab6fc3b891538e35 100755
--- a/scripts/retrigger_pipeline_jobs.py
+++ b/scripts/retrigger_pipeline_jobs.py
@@ -49,15 +49,17 @@ def retrigger_pipeline_jobs(
 
         if job.status not in status_list:
             return jobs
-        # Retrigger job
-        job.retry()
-        print("Retrigger job '%s' in %s:" % (job_name, pipeline.web_url))
-        job = project.jobs.get(job.id, retry_transient_errors=True)
-        print(job.web_url)
-        jobs.append(job)
-
-    return jobs
-
+        # Retrigger the job only if it is not in 'running' status,
+        # otherwise the retry is rejected (403 Forbidden -
+        # Job is not retryable)
+
+        if job.status != "running":
+            job.retry()
+            print("Retrigger job '%s' in %s:" % (job_name, pipeline.web_url))
+            job = project.jobs.get(job.id, retry_transient_errors=True)
+            print(job.web_url)
+            jobs.append(job)
+    return jobs
 
 def main():
     parser = argparse.ArgumentParser()
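
A minimal sketch of how the new retrigger_bsp_pipelines() helper can be exercised stand-alone; it assumes the scripts directory is importable and that GITLAB_URL/GITLAB_TOKEN environment variables and the group/project names (none of which are defined by this patch) are placeholders for illustration.

# Minimal sketch, not part of the patch: drive retrigger_bsp_pipelines()
# outside the accept_layer_merge_request.py main() flow.
import os

import gitlab

from accept_layer_merge_request import retrigger_bsp_pipelines

gl = gitlab.Gitlab(
    os.environ.get("GITLAB_URL", "https://gitlab.com"),  # assumed variable
    private_token=os.environ["GITLAB_TOKEN"],            # assumed variable
)
gl.auth()  # fail early if the token is not valid

# A ci_project path containing "nxp" limits the walk to <group>/nxp;
# both names below are placeholders.
retrigger_bsp_pipelines(gl, "example-group", "example-group/nxp/example-layer")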
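
The status check added in retrigger_pipeline_jobs.py avoids the 403 by inspecting job.status before calling retry(). An equivalent defensive pattern, sketched below under the assumption that the pipeline job listing is resolved to full ProjectJob objects, is to attempt the retry and catch python-gitlab's GitlabJobRetryError; retry_job_if_possible() is a hypothetical helper, not part of the patch.

# Sketch of an alternative guard: let GitLab decide whether a job is
# retryable and skip the ones it refuses (e.g. running jobs), which
# python-gitlab surfaces as GitlabJobRetryError.
from gitlab.exceptions import GitlabJobRetryError


def retry_job_if_possible(project, pipeline, job_name):
    retried = []
    for pjob in pipeline.jobs.list(all=True, retry_transient_errors=True):
        if pjob.name != job_name:
            continue
        # Pipeline job listings are read-only; fetch the full job object
        job = project.jobs.get(pjob.id, retry_transient_errors=True)
        try:
            job.retry()
        except GitlabJobRetryError as err:
            # e.g. "403 Forbidden - Job is not retryable" for a running job
            print("Not retrying '%s': %s" % (job_name, err))
            continue
        retried.append(project.jobs.get(job.id, retry_transient_errors=True))
    return retried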