Commit 1d5e479f authored by Tim Jaacks

Define MACHINE variable on job level

Instead of passing MACHINE from stage to stage or loading it from
testdata.json, use the original value from the Jinja2 loop and set it
directly for each job where it is used.
parent cdd47023
1 merge request: !332 Define MACHINE variable on job level
Pipeline #78852 passed in 2 minutes and 49 seconds
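The approach is sketched below: the Jinja2 loop that generates the per-machine jobs already holds the machine name, so each generated job defines MACHINE in its own variables block instead of receiving it from an earlier stage. This is a minimal sketch only; the MACHINES list name and the surrounding loop are assumptions, while the job body mirrors the package job in the diff that follows.

{% for machine in MACHINES %}
package-{{ machine }}:
  extends: .package
  variables:
    MACHINE: {{ machine }}                  # set directly from the loop variable, not from dotenv/testdata.json
    ASSOCIATED_BUILD_JOB: build-{{ machine }}
  needs:
    - build-{{ machine }}
{% endfor %}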
@@ -134,6 +134,7 @@ platformtest:{{ machine }}:
package-{{ machine }}:
extends: .package
variables:
MACHINE: {{ machine }}
PACKAGE_TYPE: image
ASSOCIATED_BUILD_JOB: build-{{ machine }}
needs:
@@ -151,12 +152,14 @@ deploy-{{ machine }}:
{% if CI_COMMIT_TAG is defined %}
stage: Deploy SoftwareStore
variables:
MACHINE: {{ machine }}
DEPLOY_SOURCE: release/$${RELEASE_NAME}
DEPLOY_TARGET: ${DEPLOY_RELEASE_TARGET}
DEPLOY_TARGET_LINK: ${DEPLOY_RELEASE_TARGET_LINK}
{% else %}
stage: Deploy SoftwareStore Internal
variables:
MACHINE: {{ machine }}
DEPLOY_SOURCE: release/$${RELEASE_NAME}
DEPLOY_TARGET: ${DEPLOY_INTERNAL_RELEASE_TARGET}
DEPLOY_TARGET_LINK: ${DEPLOY_INTERNAL_RELEASE_TARGET_LINK}
@@ -174,6 +177,8 @@ deploy-{{ machine }}:
# --------------------------------------------------------------------------------------
generate-alphaplan-data-{{ machine }}:
extends: .generate_alphaplan_data
variables:
MACHINE: {{ machine }}
needs:
- deploy-{{ machine }}
- build-version
@@ -200,6 +205,7 @@ ftp-{{ machine }}:
tags:
- ftp
variables:
MACHINE: {{ machine }}
DEPLOY_SOURCE: release/$${RELEASE_NAME}
DEPLOY_TARGET: ${DEPLOY_FTP_TARGET}
DEPLOY_TARGET_LINK: ${DEPLOY_FTP_TARGET_LINK}
@@ -228,6 +234,7 @@ azure-{{ machine }}:
when: manual
allow_failure: true
variables:
MACHINE: {{ machine }}
DEPLOY_SOURCE: release
DEPLOY_TARGET: ${AZURE_TARGET_FOLDER}
CONTAINER_NAME: ${AZURE_CONTAINER_NAME}
@@ -331,8 +331,6 @@ workflow:
artifacts:
paths:
- release/**/**/*
reports:
dotenv: package.env
timeout: 90m
before_script:
# We do this manually since we do not use GitLab's default artifact downloader
@@ -347,11 +345,13 @@ workflow:
--doc-dir=.
--output-dir=release/${RELEASE_NAME}
--release-version=${RELEASE_VERSION}
--machine=${MACHINE}
- elif [[ "${PACKAGE_TYPE}" == "sdk" ]]; then
- .gitlab-ci/scripts/package_release.py
--sdk-dir="${SDK_PATH}"
--output-dir=release/${RELEASE_NAME}
--release-version=${RELEASE_VERSION}
--machine=${MACHINE}
- fi
cache:
- key: ${CI_PIPELINE_ID}-${ASSOCIATED_BUILD_JOB}
@@ -361,7 +361,6 @@ workflow:
policy: push
paths:
- release
- package.env
# --------------------------------------------------------------------------------------
# Stage: deploy
@@ -371,10 +370,9 @@ workflow:
- when: manual
allow_failure: true
before_script:
# We do this manually since we do not use GitLab's default artifact downloader
- source package.env
# Save dotenv data for next stage
- echo "MACHINE=${MACHINE}" >> deploy.env
# Save MACHINE for confluence stage. This variable cannot be passed directly on the
# job definition level because the confluence stage is machine-independent.
- echo "MACHINE=${MACHINE}" > deploy.env
# Expand eventual nested variables contained within the DEPLOY_* variables
# FIXME: For now we need a double 'eval' here due to a GitLab bug:
# https://gitlab.com/gitlab-org/gitlab/-/issues/273409
@@ -495,7 +493,6 @@ workflow:
rules:
- if: $ALPHAPLAN_STAGE == "true"
script:
# MACHINE is available from deploy.env
# RELEASE_NAME is available from build-version.env
- .gitlab-ci/scripts/generate_alphaplan_fwr_file.py
--machine="${MACHINE}"
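The comment added to the deploy job above notes that MACHINE still has to reach the machine-independent confluence stage through a dotenv file rather than a job-level variable. A minimal sketch of that hand-off, with hypothetical job names, a placeholder machine value, and an assumed artifacts declaration for deploy.env (not visible in the hunks shown here):

deploy-example:
  variables:
    MACHINE: example-machine              # placeholder; the template sets this per machine
  script:
    - echo "MACHINE=${MACHINE}" > deploy.env
  artifacts:
    reports:
      dotenv: deploy.env                  # exposes MACHINE to downstream jobs as a CI variable

confluence-example:
  needs:
    - deploy-example                      # receives MACHINE via the dotenv report
  script:
    - echo "Publishing release documentation for ${MACHINE}"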
@@ -92,6 +92,12 @@ def main():
dest="release_version",
required=True,
)
parser.add_argument(
"--machine",
help="""Machine""",
dest="machine",
required=True,
)
args, _ = parser.parse_known_args()
# Get bitbake variables from testdata.json file
@@ -118,7 +124,6 @@ def main():
with open(testdata_files[0], "r", encoding="utf-8") as f:
buildvars = json.load(f)
machine = buildvars["MACHINE"]
sdkname = buildvars["TOOLCHAIN_OUTPUTNAME"]
image_artifacts = buildvars["DISTRO_IMAGES"].split()
artifacts = buildvars["DISTRO_RELEASE_ARTEFACTS"].split()
@@ -176,7 +181,7 @@ def main():
# Generate metadata file
generate_metadata(
machine,
args.machine,
args.release_version,
artifacts,
sdkname,
@@ -185,23 +190,19 @@ def main():
artifacts.append(os.path.join(args.images_dir, "metainfo.json"))
# Copy files
copy_files(artifacts, os.path.join(args.output_dir, machine))
copy_files(artifacts, os.path.join(args.output_dir, args.machine))
# Package SDK
if args.sdk_dir is not None:
sdkfiles = glob.glob(os.path.join(args.sdk_dir, f"{sdkname}*"))
# Generate MD5 sums file
sdk_md5sums_file = os.path.join(machine, "sdk", "md5sums.txt")
sdk_md5sums_file = os.path.join(args.machine, "sdk", "md5sums.txt")
generate_md5sums_file(sdkfiles, sdk_md5sums_file)
sdkfiles.append(sdk_md5sums_file)
# Copy files
copy_files(sdkfiles, os.path.join(args.output_dir, machine, "sdk"))
# Store pathes and other stuff in environment variable file
with open("package.env", "w", encoding="utf-8") as env_file:
env_file.write(f"MACHINE={machine}\n")
copy_files(sdkfiles, os.path.join(args.output_dir, args.machine, "sdk"))
if __name__ == "__main__":