Skip to content
Snippets Groups Projects
Commit dc3698c8 authored by Tim Jaacks's avatar Tim Jaacks
Browse files

package_release: refactoring

Improve readability of package_release.py and its submodule
generate_release_metadata.py.

- Add descriptive comments throughout the file.
- Remove duplicate code from functions. Simplify them, so that they do
  what their names say (and nothing more).
- Rename variables and function arguments to make their purpose clearer.
- Harmonize variable names with their corresponding command line arguments.
- Add pydoc for functions.
- Add type declarations to function arguments.
- Generate MD5 and metadata files in the source directories so that they
  can be copied along with the rest of the files.
parent c831d1da
No related branches found
No related tags found
1 merge request!213package_release: refactoring
Pipeline #32818 passed with stages
in 1 minute
...@@ -7,39 +7,29 @@ from datetime import datetime ...@@ -7,39 +7,29 @@ from datetime import datetime
def generate_metadata( def generate_metadata(
machine, machine,
version, version,
artifacts_image, image_artifacts,
sdk, sdk,
output_dir, output_file,
outlocal_dir,
): ):
"""Generates a metainfo.json for the release""" """Generate a metainfo.json for the release and write it to output_file."""
install_script = None install_script = None
licenses = None licenses = None
image_general = None image_general = None
image_wic = None image_wic = None
# Join filepath for metadata
if output_dir is not None:
filepath = os.path.join(output_dir, machine, "metainfo.json")
elif outlocal_dir is not None:
filepath = os.path.join(outlocal_dir, machine, "metainfo.json")
else:
print("Error: Filepath is empty")
return -1
# Collect metadata and write to metainfo.json # Collect metadata and write to metainfo.json
for artifact in artifacts_image: for artifact in image_artifacts:
if artifact == "fng-install.sh": filename = os.path.basename(artifact)
install_script = artifact if filename == "fng-install.sh":
elif artifact == "license.manifest": install_script = filename
licenses = artifact elif filename == "license.manifest":
elif artifact.endswith(machine + ".tar.gz"): licenses = filename
image_general = artifact elif filename.endswith(machine + ".tar.gz"):
elif artifact.endswith(machine + ".wic"): image_general = filename
image_wic = artifact elif filename.endswith(machine + ".wic"):
image_wic = filename
metadata = dict() metadata = dict()
...@@ -78,5 +68,5 @@ def generate_metadata( ...@@ -78,5 +68,5 @@ def generate_metadata(
new_file["path"] = licenses new_file["path"] = licenses
metadata["files"].append(new_file) metadata["files"].append(new_file)
with open(filepath, "w", encoding="utf-8") as file: with open(output_file, "w", encoding="utf-8") as file:
file.write(json.dumps(metadata)) file.write(json.dumps(metadata))
...@@ -6,74 +6,53 @@ import os ...@@ -6,74 +6,53 @@ import os
import sys import sys
import shutil import shutil
import hashlib import hashlib
import tempfile
from convert_md2html import convertmd2html from convert_md2html import convertmd2html
from generate_release_metadata import generate_metadata from generate_release_metadata import generate_metadata
def md5(input_file: str) -> str:
    """
    Calculate and return the MD5 sum of the given input_file.

    The file is read in 4 KiB chunks so that arbitrarily large files can be
    hashed without loading them into memory at once.

    :param input_file: Path of the file to hash.
    :return: Hex-encoded MD5 digest of the file contents.
    """
    hash_md5 = hashlib.md5()
    # Open in binary mode: we hash raw bytes, so no text encoding applies
    # (passing encoding= to a binary-mode open is meaningless).
    with open(input_file, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hash_md5.update(chunk)
    return hash_md5.hexdigest()
def generate_md5sums_file(input_files: list[str], output_file: str):
    """
    Calculate MD5 sums of all input_files and write them to output_file.

    Each output line has the form "<digest> <basename>". Entries are keyed
    by basename, so input files sharing a basename collapse into a single
    line (the last one wins). The parent directory of output_file is
    created if it does not exist yet.
    """
    # Map basename -> digest; dict insertion order determines output order.
    checksums = {os.path.basename(path): md5(path) for path in input_files}

    parent = os.path.dirname(output_file)
    if parent:
        os.makedirs(parent, exist_ok=True)

    with open(output_file, "w", encoding="utf-8") as out:
        for name, digest in checksums.items():
            out.write("{} {}\n".format(digest, name))
def copy_files(files: list[str], target_dir: str):
"""
Copy given files to target_dir. Create target_dir, if it does not exist. Subfolder
hierarchies on the input files will not be preserved, only plain files are copied.
"""
if target_dir is None:
return
os.makedirs(target_dir, exist_ok=True)
for source_file in files:
if os.path.exists(source_file): if os.path.exists(source_file):
target_file = os.path.join(target_dir, os.path.basename(source_file))
if output_dir is not None: print("Copy: %s -> %s" % (source_file, target_file))
target_file = os.path.join( shutil.copyfile(source_file, target_file, follow_symlinks=True)
output_dir, subdir, os.path.basename(source_file)
)
print("Copy: %s -> %s" % (source_file, target_file))
shutil.copyfile(source_file, target_file, follow_symlinks=True)
if outlocal_dir is not None:
target_file = os.path.join(
outlocal_dir, subdir, os.path.basename(source_file)
)
print("Copy: %s -> %s" % (source_file, target_file))
shutil.copyfile(source_file, target_file, follow_symlinks=True)
md5sums[os.path.basename(source_file)] = md5(source_file)
else: else:
print("Missing: " + source_file) print("Missing: " + source_file)
return md5sums
def main(): def main():
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
...@@ -121,7 +100,7 @@ def main(): ...@@ -121,7 +100,7 @@ def main():
if args.sdk_dir is not None: if args.sdk_dir is not None:
testdata_files += glob.glob(os.path.join(args.sdk_dir, "*.testdata.json")) testdata_files += glob.glob(os.path.join(args.sdk_dir, "*.testdata.json"))
# Debug stuff # Debug output if no testdata file found
if not testdata_files: if not testdata_files:
if args.images_dir is not None: if args.images_dir is not None:
print(args.images_dir) print(args.images_dir)
...@@ -133,37 +112,39 @@ def main(): ...@@ -133,37 +112,39 @@ def main():
print("-- ", f) print("-- ", f)
sys.exit("ERROR: no *.testdata.json file found in image or sdk dir.") sys.exit("ERROR: no *.testdata.json file found in image or sdk dir.")
# The required build variables from testdata have the same values for image and SDK
# builds, so we just read one of them.
with open(testdata_files[0], "r", encoding="utf-8") as f: with open(testdata_files[0], "r", encoding="utf-8") as f:
buildvars = json.load(f) buildvars = json.load(f)
machine = buildvars["MACHINE"] machine = buildvars["MACHINE"]
version = buildvars["DISTRO_VERSION"] version = buildvars["DISTRO_VERSION"]
sdkname = buildvars["TOOLCHAIN_OUTPUTNAME"] sdkname = buildvars["TOOLCHAIN_OUTPUTNAME"]
artifacts_image = buildvars["DISTRO_IMAGES"].split() image_artifacts = buildvars["DISTRO_IMAGES"].split()
artifacts_all = buildvars["DISTRO_RELEASE_ARTEFACTS"].split() artifacts = buildvars["DISTRO_RELEASE_ARTEFACTS"].split()
artifacts_all.append("BUILD_SRCREVS.log") artifacts.append("BUILD_SRCREVS.log")
# Set release name
if version.startswith("fngsystem"): if version.startswith("fngsystem"):
release_name_local = version.replace("fngsystem", "FNGSystem") release_name = version.replace("fngsystem", "FNGSystem")
# outlocal_base = "/artifacts-fngsystem"
else: else:
release_name_local = "Yocto-%s" % version release_name = "Yocto-%s" % version
# outlocal_base = "/artifacts-yocto"
# Create output directories # Create output directories
if args.outputdir_upload is not None: if args.outputdir_upload is not None:
output_dir = os.path.join(args.outputdir_upload, release_name_local) outputdir_upload = os.path.join(args.outputdir_upload, release_name)
os.makedirs(output_dir, exist_ok=True) os.makedirs(outputdir_upload, exist_ok=True)
else: else:
output_dir = None outputdir_upload = None
if args.outputdir_local is not None: if args.outputdir_local is not None:
outlocal_dir = os.path.join(args.outputdir_local, release_name_local) outputdir_local = os.path.join(args.outputdir_local, release_name)
os.makedirs(outlocal_dir, exist_ok=True) os.makedirs(outputdir_local, exist_ok=True)
else: else:
outlocal_dir = None outputdir_local = None
# Convert markdown files into html and package them # Package documentation files
if args.doc_dir is not None: if args.doc_dir is not None:
# Convert markdown to html
doc_files = glob.glob(os.path.join(args.doc_dir, "*.md")) doc_files = glob.glob(os.path.join(args.doc_dir, "*.md"))
html_files = [] html_files = []
for f in doc_files: for f in doc_files:
...@@ -171,20 +152,28 @@ def main(): ...@@ -171,20 +152,28 @@ def main():
convertmd2html(f, fout) convertmd2html(f, fout)
html_files.append(fout) html_files.append(fout)
doc_md5sums = copy_files( files = doc_files + html_files
doc_files + html_files, "", "", output_dir, outlocal_dir
)
write_md5sums_file(doc_md5sums, "", output_dir, outlocal_dir)
# Generate MD5 sums file
doc_md5sums_file = "md5sums.txt"
generate_md5sums_file(files, doc_md5sums_file)
files.append(doc_md5sums_file)
# Copy files
if outputdir_upload is not None:
copy_files(files, outputdir_upload)
if outputdir_local is not None:
copy_files(files, outputdir_local)
# Package image files
if args.images_dir is not None: if args.images_dir is not None:
# Add some additional files to the artifacts # Add some additional files to the artifacts
for artifact in artifacts_image: for artifact in image_artifacts:
artifacts_all.append(artifact.split(".")[0] + ".manifest") artifacts.append(artifact.split(".")[0] + ".manifest")
artifacts_all.append(artifact.split(".")[0] + ".testdata.json") artifacts.append(artifact.split(".")[0] + ".testdata.json")
md5sums = copy_files( # Prepend path to artifacts
artifacts_all, args.images_dir, machine, output_dir, outlocal_dir artifacts = [os.path.join(args.images_dir, artifact) for artifact in artifacts]
)
# If the path for the licenses is set, we check for the list with all # If the path for the licenses is set, we check for the list with all
# licenses. If the list is found, we copy it to the output directory # licenses. If the list is found, we copy it to the output directory
...@@ -194,52 +183,49 @@ def main(): ...@@ -194,52 +183,49 @@ def main():
license_manifest = glob.glob( license_manifest = glob.glob(
os.path.join(args.licenses_dir, "**", "license.manifest") os.path.join(args.licenses_dir, "**", "license.manifest")
) )
artifacts.append(
os.path.join(os.path.dirname(license_manifest[0]), "license.manifest")
)
if license_manifest: # Generate MD5 sums file
md5sums.update( image_md5sums_file = os.path.join(args.images_dir, "md5sums.txt")
copy_files( generate_md5sums_file(artifacts, image_md5sums_file)
["license.manifest"], artifacts.append(image_md5sums_file)
os.path.dirname(license_manifest[0]),
machine, # Generate metadata file
output_dir,
outlocal_dir,
)
)
artifacts_all.append("license.manifest")
# Create md5sums file for build artifacts
write_md5sums_file(md5sums, machine, output_dir, outlocal_dir)
# Generate metadata in case of an image build
if args.sdk_dir is None:
generate_metadata( generate_metadata(
machine, machine,
version, version,
artifacts_all, artifacts,
sdkname, sdkname,
output_dir, os.path.join(args.images_dir, "metainfo.json"),
outlocal_dir,
) )
artifacts.append(os.path.join(args.images_dir, "metainfo.json"))
# Copy files
if outputdir_upload is not None:
copy_files(artifacts, os.path.join(outputdir_upload, machine))
if outputdir_local is not None:
copy_files(artifacts, os.path.join(outputdir_local, machine))
# Handle SDK if available # Package SDK
if args.sdk_dir is not None: if args.sdk_dir is not None:
sdkfiles = glob.glob(os.path.join(args.sdk_dir, sdkname + "*")) sdkfiles = glob.glob(os.path.join(args.sdk_dir, sdkname + "*"))
print("package_release.py: Upload the following sdk files:")
print(sdkfiles) # Generate MD5 sums file
sdk_md5sums = copy_files( sdk_md5sums_file = os.path.join(machine, "sdk", "md5sums.txt")
sdkfiles, "", os.path.join(machine, "sdk"), None, outlocal_dir generate_md5sums_file(sdkfiles, sdk_md5sums_file)
) sdkfiles.append(sdk_md5sums_file)
write_md5sums_file(
sdk_md5sums, os.path.join(machine, "sdk"), None, outlocal_dir # Copy files
) if outputdir_local is not None:
else: copy_files(sdkfiles, os.path.join(outputdir_local, machine, "sdk"))
print("package_release.py: sdk_dir is not specified")
# Store pathes and other stuff in environment variable file # Store pathes and other stuff in environment variable file
with open("package.env", "w", encoding="utf-8") as env_file: with open("package.env", "w", encoding="utf-8") as env_file:
env_file.write("VERSION={}\n".format(version)) env_file.write("VERSION={}\n".format(version))
env_file.write("MACHINE={}\n".format(machine)) env_file.write("MACHINE={}\n".format(machine))
env_file.write("LOCALDIR={}\n".format(outlocal_dir)) env_file.write("LOCALDIR={}\n".format(outputdir_local))
if __name__ == "__main__": if __name__ == "__main__":
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment