Commit dc3698c8 authored by Tim Jaacks

package_release: refactoring

Improve readability of package_release.py and its submodule
generate_release_metadata.py.

- Add describing comments throughout the file.
- Remove duplicate code from functions. Simplify them, so that they do
  what their names say (and nothing more).
- Rename variables and function arguments to make their purpose clearer.
- Harmonize variable names with their corresponding command line arguments.
- Add pydoc for functions.
- Add type declarations to function arguments.
- Generate MD5 and metadata files in the source directories so that they
  can be copied along with the rest of the files (see the sketch below).
parent c831d1da
1 merge request: !213 package_release: refactoring
Pipeline #32818 passed with stages in 1 minute
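
The last bullet above is the main behavioural change: MD5 sum and metadata files are now written next to the source artifacts and then copied like any other file, instead of copy_files() computing MD5 sums as a side effect and write_md5sums_file() assembling them in a temporary directory. A minimal sketch of the new call pattern follows, assuming package_release.py is importable from the working directory; all file and directory names are made up for illustration.

# Sketch only: illustrates the refactored helpers introduced in this commit.
# The import path and every file/directory name below are assumptions.
import os

from package_release import copy_files, generate_md5sums_file

images_dir = "deploy/images/mymachine"      # hypothetical source directory
upload_dir = "release/Yocto-1.0/mymachine"  # hypothetical target directory

# Artifacts that belong to the release (hypothetical names).
artifacts = [os.path.join(images_dir, f) for f in ("image.tar.gz", "fng-install.sh")]

# 1. Write md5sums.txt next to the sources so it travels with them ...
md5sums_file = os.path.join(images_dir, "md5sums.txt")
generate_md5sums_file(artifacts, md5sums_file)
artifacts.append(md5sums_file)

# 2. ... then copy everything to the target directory in one step.
copy_files(artifacts, upload_dir)

# The same pattern applies to metainfo.json via generate_metadata(), which now
# takes an explicit output_file instead of deriving the path itself.
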
generate_release_metadata.py
@@ -7,39 +7,29 @@ from datetime import datetime
def generate_metadata(
machine,
version,
artifacts_image,
image_artifacts,
sdk,
output_dir,
outlocal_dir,
output_file,
):
"""Generates a metainfo.json for the release"""
"""Generate a metainfo.json for the release and write it to output_file."""
install_script = None
licenses = None
image_general = None
image_wic = None
# Join filepath for metadata
if output_dir is not None:
filepath = os.path.join(output_dir, machine, "metainfo.json")
elif outlocal_dir is not None:
filepath = os.path.join(outlocal_dir, machine, "metainfo.json")
else:
print("Error: Filepath is empty")
return -1
# Collect metadata and write to metainfo.json
for artifact in artifacts_image:
if artifact == "fng-install.sh":
install_script = artifact
elif artifact == "license.manifest":
licenses = artifact
elif artifact.endswith(machine + ".tar.gz"):
image_general = artifact
elif artifact.endswith(machine + ".wic"):
image_wic = artifact
for artifact in image_artifacts:
filename = os.path.basename(artifact)
if filename == "fng-install.sh":
install_script = filename
elif filename == "license.manifest":
licenses = filename
elif filename.endswith(machine + ".tar.gz"):
image_general = filename
elif filename.endswith(machine + ".wic"):
image_wic = filename
metadata = dict()
@@ -78,5 +68,5 @@ def generate_metadata(
new_file["path"] = licenses
metadata["files"].append(new_file)
with open(filepath, "w", encoding="utf-8") as file:
with open(output_file, "w", encoding="utf-8") as file:
file.write(json.dumps(metadata))
package_release.py
@@ -6,74 +6,53 @@ import os
import sys
import shutil
import hashlib
import tempfile
from convert_md2html import convertmd2html
from generate_release_metadata import generate_metadata
def md5(fname):
def md5(input_file: str):
"""
Calculate and return the MD5 sum of the given input_file.
"""
hash_md5 = hashlib.md5()
with open(fname, "rb", encoding=None) as f:
with open(input_file, "rb", encoding=None) as f:
for chunk in iter(lambda: f.read(4096), b""):
hash_md5.update(chunk)
return hash_md5.hexdigest()
def write_md5sums_file(md5sums, subdir, output_dir, outlocal_dir):
with tempfile.TemporaryDirectory() as tmp:
source_file = os.path.join(tmp, subdir, "md5sums.txt")
os.makedirs(os.path.dirname(source_file), exist_ok=True)
with open(source_file, "w", encoding="utf-8") as f_md5:
for f, h in md5sums.items():
f_md5.write("{} {}\n".format(h, f))
if output_dir is not None:
target_file = os.path.join(
output_dir, subdir, os.path.basename(source_file)
)
print("Copy: %s -> %s" % (source_file, target_file))
shutil.copyfile(source_file, target_file, follow_symlinks=True)
if outlocal_dir is not None:
target_file = os.path.join(
outlocal_dir, subdir, os.path.basename(source_file)
)
print("Copy: %s -> %s" % (source_file, target_file))
shutil.copyfile(source_file, target_file, follow_symlinks=True)
def copy_files(files, input_dir, subdir, output_dir, outlocal_dir):
def generate_md5sums_file(input_files: list[str], output_file: str):
"""
Calculate MD5 sums of all input_files and write them to output_file.
"""
md5sums = {}
if output_dir is not None:
os.makedirs(os.path.join(output_dir, subdir), exist_ok=True)
if outlocal_dir is not None:
os.makedirs(os.path.join(outlocal_dir, subdir), exist_ok=True)
for f in files:
source_file = os.path.join(input_dir, f)
for f in input_files:
md5sums[os.path.basename(f)] = md5(f)
output_dir = os.path.dirname(output_file)
if output_dir:
os.makedirs(output_dir, exist_ok=True)
with open(output_file, "w", encoding="utf-8") as f_md5:
for f, h in md5sums.items():
f_md5.write("{} {}\n".format(h, f))
def copy_files(files: list[str], target_dir: str):
"""
Copy given files to target_dir. Create target_dir, if it does not exist. Subfolder
hierarchies on the input files will not be preserved, only plain files are copied.
"""
if target_dir is None:
return
os.makedirs(target_dir, exist_ok=True)
for source_file in files:
if os.path.exists(source_file):
if output_dir is not None:
target_file = os.path.join(
output_dir, subdir, os.path.basename(source_file)
)
print("Copy: %s -> %s" % (source_file, target_file))
shutil.copyfile(source_file, target_file, follow_symlinks=True)
if outlocal_dir is not None:
target_file = os.path.join(
outlocal_dir, subdir, os.path.basename(source_file)
)
print("Copy: %s -> %s" % (source_file, target_file))
shutil.copyfile(source_file, target_file, follow_symlinks=True)
md5sums[os.path.basename(source_file)] = md5(source_file)
target_file = os.path.join(target_dir, os.path.basename(source_file))
print("Copy: %s -> %s" % (source_file, target_file))
shutil.copyfile(source_file, target_file, follow_symlinks=True)
else:
print("Missing: " + source_file)
return md5sums
def main():
parser = argparse.ArgumentParser()
@@ -121,7 +100,7 @@ def main():
if args.sdk_dir is not None:
testdata_files += glob.glob(os.path.join(args.sdk_dir, "*.testdata.json"))
# Debug stuff
# Debug output if no testdata file found
if not testdata_files:
if args.images_dir is not None:
print(args.images_dir)
@@ -133,37 +112,39 @@
print("-- ", f)
sys.exit("ERROR: no *.testdata.json file found in image or sdk dir.")
# The required build variables from testdata have the same values for image and SDK
# builds, so we just read one of them.
with open(testdata_files[0], "r", encoding="utf-8") as f:
buildvars = json.load(f)
machine = buildvars["MACHINE"]
version = buildvars["DISTRO_VERSION"]
sdkname = buildvars["TOOLCHAIN_OUTPUTNAME"]
artifacts_image = buildvars["DISTRO_IMAGES"].split()
artifacts_all = buildvars["DISTRO_RELEASE_ARTEFACTS"].split()
artifacts_all.append("BUILD_SRCREVS.log")
image_artifacts = buildvars["DISTRO_IMAGES"].split()
artifacts = buildvars["DISTRO_RELEASE_ARTEFACTS"].split()
artifacts.append("BUILD_SRCREVS.log")
# Set release name
if version.startswith("fngsystem"):
release_name_local = version.replace("fngsystem", "FNGSystem")
# outlocal_base = "/artifacts-fngsystem"
release_name = version.replace("fngsystem", "FNGSystem")
else:
release_name_local = "Yocto-%s" % version
# outlocal_base = "/artifacts-yocto"
release_name = "Yocto-%s" % version
# Create output directories
if args.outputdir_upload is not None:
output_dir = os.path.join(args.outputdir_upload, release_name_local)
os.makedirs(output_dir, exist_ok=True)
outputdir_upload = os.path.join(args.outputdir_upload, release_name)
os.makedirs(outputdir_upload, exist_ok=True)
else:
output_dir = None
outputdir_upload = None
if args.outputdir_local is not None:
outlocal_dir = os.path.join(args.outputdir_local, release_name_local)
os.makedirs(outlocal_dir, exist_ok=True)
outputdir_local = os.path.join(args.outputdir_local, release_name)
os.makedirs(outputdir_local, exist_ok=True)
else:
outlocal_dir = None
outputdir_local = None
# Convert markdown files into html and package them
# Package documentation files
if args.doc_dir is not None:
# Convert markdown to html
doc_files = glob.glob(os.path.join(args.doc_dir, "*.md"))
html_files = []
for f in doc_files:
@@ -171,20 +152,28 @@
convertmd2html(f, fout)
html_files.append(fout)
doc_md5sums = copy_files(
doc_files + html_files, "", "", output_dir, outlocal_dir
)
write_md5sums_file(doc_md5sums, "", output_dir, outlocal_dir)
files = doc_files + html_files
# Generate MD5 sums file
doc_md5sums_file = "md5sums.txt"
generate_md5sums_file(files, doc_md5sums_file)
files.append(doc_md5sums_file)
# Copy files
if outputdir_upload is not None:
copy_files(files, outputdir_upload)
if outputdir_local is not None:
copy_files(files, outputdir_local)
# Package image files
if args.images_dir is not None:
# Add some additional files to the artifacts
for artifact in artifacts_image:
artifacts_all.append(artifact.split(".")[0] + ".manifest")
artifacts_all.append(artifact.split(".")[0] + ".testdata.json")
for artifact in image_artifacts:
artifacts.append(artifact.split(".")[0] + ".manifest")
artifacts.append(artifact.split(".")[0] + ".testdata.json")
md5sums = copy_files(
artifacts_all, args.images_dir, machine, output_dir, outlocal_dir
)
# Prepend path to artifacts
artifacts = [os.path.join(args.images_dir, artifact) for artifact in artifacts]
# If the path for the licenses is set, we check for the list with all
# licenses. If the list is found, we copy it to the output directory
@@ -194,52 +183,49 @@
license_manifest = glob.glob(
os.path.join(args.licenses_dir, "**", "license.manifest")
)
artifacts.append(
os.path.join(os.path.dirname(license_manifest[0]), "license.manifest")
)
if license_manifest:
md5sums.update(
copy_files(
["license.manifest"],
os.path.dirname(license_manifest[0]),
machine,
output_dir,
outlocal_dir,
)
)
artifacts_all.append("license.manifest")
# Create md5sums file for build artifacts
write_md5sums_file(md5sums, machine, output_dir, outlocal_dir)
# Generate metadata in case of an image build
if args.sdk_dir is None:
# Generate MD5 sums file
image_md5sums_file = os.path.join(args.images_dir, "md5sums.txt")
generate_md5sums_file(artifacts, image_md5sums_file)
artifacts.append(image_md5sums_file)
# Generate metadata file
generate_metadata(
machine,
version,
artifacts_all,
artifacts,
sdkname,
output_dir,
outlocal_dir,
os.path.join(args.images_dir, "metainfo.json"),
)
artifacts.append(os.path.join(args.images_dir, "metainfo.json"))
# Copy files
if outputdir_upload is not None:
copy_files(artifacts, os.path.join(outputdir_upload, machine))
if outputdir_local is not None:
copy_files(artifacts, os.path.join(outputdir_local, machine))
# Handle SDK if available
# Package SDK
if args.sdk_dir is not None:
sdkfiles = glob.glob(os.path.join(args.sdk_dir, sdkname + "*"))
print("package_release.py: Upload the following sdk files:")
print(sdkfiles)
sdk_md5sums = copy_files(
sdkfiles, "", os.path.join(machine, "sdk"), None, outlocal_dir
)
write_md5sums_file(
sdk_md5sums, os.path.join(machine, "sdk"), None, outlocal_dir
)
else:
print("package_release.py: sdk_dir is not specified")
# Generate MD5 sums file
sdk_md5sums_file = os.path.join(machine, "sdk", "md5sums.txt")
generate_md5sums_file(sdkfiles, sdk_md5sums_file)
sdkfiles.append(sdk_md5sums_file)
# Copy files
if outputdir_local is not None:
copy_files(sdkfiles, os.path.join(outputdir_local, machine, "sdk"))
# Store paths and other stuff in environment variable file
with open("package.env", "w", encoding="utf-8") as env_file:
env_file.write("VERSION={}\n".format(version))
env_file.write("MACHINE={}\n".format(machine))
env_file.write("LOCALDIR={}\n".format(outlocal_dir))
env_file.write("LOCALDIR={}\n".format(outputdir_local))
if __name__ == "__main__":
......