diff --git a/eng/Version.Details.props b/eng/Version.Details.props index e5c32725d..511b05ac3 100644 --- a/eng/Version.Details.props +++ b/eng/Version.Details.props @@ -6,7 +6,7 @@ This file should be imported by eng/Versions.props - 11.0.0-beta.26230.2 + 11.0.0-beta.26263.2 diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index be487fbb0..d208925e6 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -3,9 +3,9 @@ - + https://github.com/dotnet/arcade - 8a7b2d3d39078db20f33067c7929b88fdff4ee63 + 8ca6905d1b42b39623ed8a3f68379777ff86d427 diff --git a/eng/common/core-templates/steps/publish-logs.yml b/eng/common/core-templates/steps/publish-logs.yml index 84a1922c7..5fc099a11 100644 --- a/eng/common/core-templates/steps/publish-logs.yml +++ b/eng/common/core-templates/steps/publish-logs.yml @@ -33,8 +33,6 @@ steps: '$(publishing-dnceng-devdiv-code-r-build-re)' '$(dn-bot-all-orgs-artifact-feeds-rw)' '$(akams-client-id)' - '$(microsoft-symbol-server-pat)' - '$(symweb-symbol-server-pat)' '$(dnceng-symbol-server-pat)' '$(dn-bot-all-orgs-build-rw-code-rw)' '$(System.AccessToken)' diff --git a/eng/common/cross/build-rootfs.sh b/eng/common/cross/build-rootfs.sh index 314c93c57..cef5d2d67 100755 --- a/eng/common/cross/build-rootfs.sh +++ b/eng/common/cross/build-rootfs.sh @@ -5,7 +5,7 @@ set -e usage() { echo "Usage: $0 [BuildArch] [CodeName] [lldbx.y] [llvmx[.y]] [--skipunmount] --rootfsdir ]" - echo "BuildArch can be: arm(default), arm64, armel, armv6, loongarch64, ppc64le, riscv64, s390x, x64, x86" + echo "BuildArch can be: arm(default), arm64, loongarch64, ppc64le, riscv64, s390x, x64, x86" echo "CodeName - optional, Code name for Linux, can be: xenial(default), zesty, bionic, alpine" echo " for alpine can be specified with version: alpineX.YY or alpineedge" echo " for FreeBSD can be: freebsd13, freebsd14" @@ -139,7 +139,6 @@ __AlpineKeys=' 
616db30d:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAnpUpyWDWjlUk3smlWeA0\nlIMW+oJ38t92CRLHH3IqRhyECBRW0d0aRGtq7TY8PmxjjvBZrxTNDpJT6KUk4LRm\na6A6IuAI7QnNK8SJqM0DLzlpygd7GJf8ZL9SoHSH+gFsYF67Cpooz/YDqWrlN7Vw\ntO00s0B+eXy+PCXYU7VSfuWFGK8TGEv6HfGMALLjhqMManyvfp8hz3ubN1rK3c8C\nUS/ilRh1qckdbtPvoDPhSbTDmfU1g/EfRSIEXBrIMLg9ka/XB9PvWRrekrppnQzP\nhP9YE3x/wbFc5QqQWiRCYyQl/rgIMOXvIxhkfe8H5n1Et4VAorkpEAXdsfN8KSVv\nLSMazVlLp9GYq5SUpqYX3KnxdWBgN7BJoZ4sltsTpHQ/34SXWfu3UmyUveWj7wp0\nx9hwsPirVI00EEea9AbP7NM2rAyu6ukcm4m6ATd2DZJIViq2es6m60AE6SMCmrQF\nwmk4H/kdQgeAELVfGOm2VyJ3z69fQuywz7xu27S6zTKi05Qlnohxol4wVb6OB7qG\nLPRtK9ObgzRo/OPumyXqlzAi/Yvyd1ZQk8labZps3e16bQp8+pVPiumWioMFJDWV\nGZjCmyMSU8V6MB6njbgLHoyg2LCukCAeSjbPGGGYhnKLm1AKSoJh3IpZuqcKCk5C\n8CM1S15HxV78s9dFntEqIokCAwEAAQ== 66ba20fe:MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtfB12w4ZgqsXWZDfUAV/\n6Y4aHUKIu3q4SXrNZ7CXF9nXoAVYrS7NAxJdAodsY3vPCN0g5O8DFXR+390LdOuQ\n+HsGKCc1k5tX5ZXld37EZNTNSbR0k+NKhd9h6X3u6wqPOx7SIKxwAQR8qeeFq4pP\nrt9GAGlxtuYgzIIcKJPwE0dZlcBCg+GnptCUZXp/38BP1eYC+xTXSL6Muq1etYfg\nodXdb7Yl+2h1IHuOwo5rjgY5kpY7GcAs8AjGk3lDD/av60OTYccknH0NCVSmPoXK\nvrxDBOn0LQRNBLcAfnTKgHrzy0Q5h4TNkkyTgxkoQw5ObDk9nnabTxql732yy9BY\ns+hM9+dSFO1HKeVXreYSA2n1ndF18YAvAumzgyqzB7I4pMHXq1kC/8bONMJxwSkS\nYm6CoXKyavp7RqGMyeVpRC7tV+blkrrUml0BwNkxE+XnwDRB3xDV6hqgWe0XrifD\nYTfvd9ScZQP83ip0r4IKlq4GMv/R5shcCRJSkSZ6QSGshH40JYSoiwJf5FHbj9ND\n7do0UAqebWo4yNx63j/wb2ULorW3AClv0BCFSdPsIrCStiGdpgJDBR2P2NZOCob3\nG9uMj+wJD6JJg2nWqNJxkANXX37Qf8plgzssrhrgOvB0fjjS7GYhfkfmZTJ0wPOw\nA8+KzFseBh4UFGgue78KwgkCAwEAAQ== ' -__Keyring= __KeyringFile="/usr/share/keyrings/ubuntu-archive-keyring.gpg" __SkipSigCheck=0 __SkipEmulation=0 @@ -162,6 +161,10 @@ while :; do __AlpineArch=armv7 __QEMUArch=arm ;; + armel) + # this is only used for tizen-build-rootfs.sh + __BuildArch=armel + ;; arm64) __BuildArch=arm64 __UbuntuArch=arm64 @@ -172,31 +175,6 @@ while :; do __OpenBSDArch=arm64 __OpenBSDMachineArch=aarch64 ;; - armel) - __BuildArch=armel - __UbuntuArch=armel - 
__UbuntuRepo="http://archive.debian.org/debian/" - __CodeName=buster - __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" - __LLDB_Package="liblldb-6.0-dev" - __UbuntuPackages="${__UbuntuPackages// libomp-dev/}" - __UbuntuPackages="${__UbuntuPackages// libomp5/}" - __UbuntuSuites= - ;; - armv6) - __BuildArch=armv6 - __UbuntuArch=armhf - __QEMUArch=arm - __UbuntuRepo="http://raspbian.raspberrypi.org/raspbian/" - __CodeName=buster - __KeyringFile="/usr/share/keyrings/raspbian-archive-keyring.gpg" - __LLDB_Package="liblldb-6.0-dev" - __UbuntuSuites= - - if [[ -e "$__KeyringFile" ]]; then - __Keyring="--keyring $__KeyringFile" - fi - ;; loongarch64) __BuildArch=loongarch64 __AlpineArch=loongarch64 @@ -204,10 +182,6 @@ while :; do __UbuntuArch=loong64 __UbuntuSuites=unreleased __LLDB_Package="liblldb-19-dev" - - if [[ "$__CodeName" == "sid" ]]; then - __UbuntuRepo="http://ftp.ports.debian.org/debian-ports/" - fi ;; riscv64) __BuildArch=riscv64 @@ -223,7 +197,7 @@ while :; do __AlpineArch=ppc64le __QEMUArch=ppc64le __UbuntuArch=ppc64el - __UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/" + __UbuntuRepo="https://ports.ubuntu.com/ubuntu-ports/" __UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}" __UbuntuPackages="${__UbuntuPackages// libomp-dev/}" __UbuntuPackages="${__UbuntuPackages// libomp5/}" @@ -234,7 +208,7 @@ while :; do __AlpineArch=s390x __QEMUArch=s390x __UbuntuArch=s390x - __UbuntuRepo="http://ports.ubuntu.com/ubuntu-ports/" + __UbuntuRepo="https://ports.ubuntu.com/ubuntu-ports/" __UbuntuPackages="${__UbuntuPackages// libunwind8-dev/}" __UbuntuPackages="${__UbuntuPackages// libomp-dev/}" __UbuntuPackages="${__UbuntuPackages// libomp5/}" @@ -250,13 +224,13 @@ while :; do __OpenBSDMachineArch=amd64 __illumosArch=x86_64 __HaikuArch=x86_64 - __UbuntuRepo="http://archive.ubuntu.com/ubuntu/" + __UbuntuRepo="https://archive.ubuntu.com/ubuntu/" ;; x86) __BuildArch=x86 __UbuntuArch=i386 __AlpineArch=x86 - 
__UbuntuRepo="http://archive.ubuntu.com/ubuntu/" + __UbuntuRepo="https://archive.ubuntu.com/ubuntu/" ;; lldb*) version="$(echo "$lowerI" | tr -d '[:alpha:]-=')" @@ -316,7 +290,7 @@ while :; do __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" if [[ -z "$__UbuntuRepo" ]]; then - __UbuntuRepo="http://ftp.debian.org/debian/" + __UbuntuRepo="https://archive.debian.org/debian/" fi ;; buster) # Debian 10 @@ -325,7 +299,7 @@ while :; do __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" if [[ -z "$__UbuntuRepo" ]]; then - __UbuntuRepo="http://archive.debian.org/debian/" + __UbuntuRepo="https://archive.debian.org/debian/" fi ;; bullseye) # Debian 11 @@ -333,7 +307,7 @@ while :; do __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" if [[ -z "$__UbuntuRepo" ]]; then - __UbuntuRepo="http://ftp.debian.org/debian/" + __UbuntuRepo="https://ftp.debian.org/debian/" fi ;; bookworm) # Debian 12 @@ -341,7 +315,7 @@ while :; do __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" if [[ -z "$__UbuntuRepo" ]]; then - __UbuntuRepo="http://ftp.debian.org/debian/" + __UbuntuRepo="https://ftp.debian.org/debian/" fi ;; sid) # Debian sid @@ -350,25 +324,21 @@ while :; do # Debian-Ports architectures need different values case "$__UbuntuArch" in - amd64|arm64|armel|armhf|i386|mips64el|ppc64el|riscv64|s390x) + amd64|arm64|armhf|i386|mips64el|ppc64el|riscv64|s390x) __KeyringFile="/usr/share/keyrings/debian-archive-keyring.gpg" if [[ -z "$__UbuntuRepo" ]]; then - __UbuntuRepo="http://ftp.debian.org/debian/" + __UbuntuRepo="https://ftp.debian.org/debian/" fi ;; *) __KeyringFile="/usr/share/keyrings/debian-ports-archive-keyring.gpg" if [[ -z "$__UbuntuRepo" ]]; then - __UbuntuRepo="http://ftp.ports.debian.org/debian-ports/" + __UbuntuRepo="https://ftp.debian.org/debian-ports/" fi ;; esac - - if [[ -e "$__KeyringFile" ]]; then - __Keyring="--keyring $__KeyringFile" - fi ;; tizen) __CodeName= @@ -472,7 +442,7 @@ fi __UbuntuPackages+=" 
${__LLDB_Package:-}" if [[ -z "$__UbuntuRepo" ]]; then - __UbuntuRepo="http://ports.ubuntu.com/" + __UbuntuRepo="https://ports.ubuntu.com/" fi if [[ -n "$__LLVM_MajorVersion" ]]; then @@ -559,15 +529,15 @@ if [[ "$__CodeName" == "alpine" ]]; then # initialize DB # shellcheck disable=SC2086 "$__ApkToolsDir/apk.static" \ - -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ - -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ + -X "https://dl-cdn.alpinelinux.org/alpine/$version/main" \ + -X "https://dl-cdn.alpinelinux.org/alpine/$version/community" \ -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" --initdb add if [[ "$__AlpineLlvmLibsLookup" == 1 ]]; then # shellcheck disable=SC2086 __AlpinePackages+=" $("$__ApkToolsDir/apk.static" \ - -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ - -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ + -X "https://dl-cdn.alpinelinux.org/alpine/$version/main" \ + -X "https://dl-cdn.alpinelinux.org/alpine/$version/community" \ -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" \ search 'llvm*-libs' | grep -E '^llvm' | sort | tail -1 | sed 's/-[^-]*//2g')" fi @@ -575,8 +545,8 @@ if [[ "$__CodeName" == "alpine" ]]; then # install all packages in one go # shellcheck disable=SC2086 "$__ApkToolsDir/apk.static" \ - -X "http://dl-cdn.alpinelinux.org/alpine/$version/main" \ - -X "http://dl-cdn.alpinelinux.org/alpine/$version/community" \ + -X "https://dl-cdn.alpinelinux.org/alpine/$version/main" \ + -X "https://dl-cdn.alpinelinux.org/alpine/$version/community" \ -U $__ApkSignatureArg --root "$__RootfsDir" --arch "$__AlpineArch" $__NoEmulationArg \ add $__AlpinePackages @@ -593,7 +563,7 @@ elif [[ "$__CodeName" == "freebsd" ]]; then curl -SL "https://download.freebsd.org/ftp/releases/${__FreeBSDArch}/${__FreeBSDMachineArch}/${__FreeBSDBase}/base.txz" | tar -C "$__RootfsDir" -Jxf - ./lib ./usr/lib ./usr/libdata ./usr/include ./usr/share/keys ./etc ./bin/freebsd-version 
fi echo "ABI = \"FreeBSD:${__FreeBSDABI}:${__FreeBSDMachineArch}\"; FINGERPRINTS = \"${__RootfsDir}/usr/share/keys\"; REPOS_DIR = [\"${__RootfsDir}/etc/pkg\"]; REPO_AUTOUPDATE = NO; RUN_SCRIPTS = NO;" > "${__RootfsDir}"/usr/local/etc/pkg.conf - echo "FreeBSD: { url: \"pkg+http://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf + echo "FreeBSD: { url: \"pkg+https://pkg.FreeBSD.org/\${ABI}/quarterly\", mirror_type: \"srv\", signature_type: \"fingerprints\", fingerprints: \"/usr/share/keys/pkg\", enabled: yes }" > "${__RootfsDir}"/etc/pkg/FreeBSD.conf mkdir -p "$__RootfsDir"/tmp # get and build package manager if [[ "$__hasWget" == 1 ]]; then @@ -830,6 +800,14 @@ elif [[ "$__CodeName" == "haiku" ]]; then elif [[ -n "$__CodeName" ]]; then __Suites="$__CodeName $(for suite in $__UbuntuSuites; do echo -n "$__CodeName-$suite "; done)" + __SigCheckArgs= + if [[ "$__SkipSigCheck" == "0" ]]; then + if [[ -e "$__KeyringFile" ]]; then + __SigCheckArgs="--keyring $__KeyringFile" + fi + __SigCheckArgs="$__SigCheckArgs --force-check-gpg" + fi + if [[ "$__SkipEmulation" == "1" ]]; then if [[ -z "$AR" ]]; then if command -v ar &>/dev/null; then @@ -845,31 +823,23 @@ elif [[ -n "$__CodeName" ]]; then PYTHON=${PYTHON_EXECUTABLE:-python3} # shellcheck disable=SC2086,SC2046 - echo running "$PYTHON" "$__CrossDir/install-debs.py" --arch "$__UbuntuArch" --mirror "$__UbuntuRepo" --rootfsdir "$__RootfsDir" --artool "$AR" \ + echo running "$PYTHON" "$__CrossDir/install-debs.py" $__SigCheckArgs --arch "$__UbuntuArch" --mirror "$__UbuntuRepo" --rootfsdir "$__RootfsDir" --artool "$AR" \ $(for suite in $__Suites; do echo -n "--suite $suite "; done) \ $__UbuntuPackages # shellcheck disable=SC2086,SC2046 - "$PYTHON" "$__CrossDir/install-debs.py" --arch "$__UbuntuArch" --mirror "$__UbuntuRepo" --rootfsdir "$__RootfsDir" --artool "$AR" \ + "$PYTHON" 
"$__CrossDir/install-debs.py" $__SigCheckArgs --arch "$__UbuntuArch" --mirror "$__UbuntuRepo" --rootfsdir "$__RootfsDir" --artool "$AR" \ $(for suite in $__Suites; do echo -n "--suite $suite "; done) \ $__UbuntuPackages exit 0 fi - __UpdateOptions= - if [[ "$__SkipSigCheck" == "0" ]]; then - __Keyring="$__Keyring --force-check-gpg" - else - __Keyring= - __UpdateOptions="--allow-unauthenticated --allow-insecure-repositories" - fi - # shellcheck disable=SC2086 - echo running debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo" + echo running debootstrap "--variant=minbase" $__SigCheckArgs --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo" # shellcheck disable=SC2086 - if ! debootstrap "--variant=minbase" $__Keyring --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"; then + if ! debootstrap "--variant=minbase" $__SigCheckArgs --arch "$__UbuntuArch" "$__CodeName" "$__RootfsDir" "$__UbuntuRepo"; then echo "debootstrap failed! 
dumping debootstrap.log" cat "$__RootfsDir/debootstrap/debootstrap.log" exit 1 @@ -887,6 +857,11 @@ Components: main universe Signed-By: $__KeyringFile EOF + __UpdateOptions= + if [[ "$__SkipSigCheck" == "1" ]]; then + __UpdateOptions="--allow-unauthenticated --allow-insecure-repositories" + fi + # shellcheck disable=SC2086 chroot "$__RootfsDir" apt-get update $__UpdateOptions chroot "$__RootfsDir" apt-get -f -y install diff --git a/eng/common/cross/install-debs.py b/eng/common/cross/install-debs.py index c81eb37e5..100c4378d 100755 --- a/eng/common/cross/install-debs.py +++ b/eng/common/cross/install-debs.py @@ -4,6 +4,7 @@ import asyncio import aiohttp import gzip +import hashlib import os import re import shutil @@ -16,7 +17,7 @@ from collections import deque from functools import cmp_to_key -async def download_file(session, url, dest_path, max_retries=3, retry_delay=2, timeout=60): +async def download_file(session, url, dest_path, max_retries=3, retry_delay=2, timeout=60, checksum=None): """Asynchronous file download with retries.""" attempt = 0 while attempt < max_retries: @@ -25,19 +26,25 @@ async def download_file(session, url, dest_path, max_retries=3, retry_delay=2, t if response.status == 200: with open(dest_path, "wb") as f: content = await response.read() + + # verify checksum if provided + if checksum: + sha256 = hashlib.sha256(content).hexdigest() + if sha256 != checksum: + raise Exception(f"SHA256 mismatch for {url}: expected {checksum}, got {sha256}") + f.write(content) print(f"Downloaded {url} at {dest_path}") return else: - print(f"Failed to download {url}, Status Code: {response.status}") - break + raise Exception(f"Failed to download {url}, Status Code: {response.status}") except (asyncio.CancelledError, asyncio.TimeoutError, aiohttp.ClientError) as e: print(f"Error downloading {url}: {type(e).__name__} - {e}. 
Retrying...") attempt += 1 await asyncio.sleep(retry_delay) - print(f"Failed to download {url} after {max_retries} attempts.") + raise Exception(f"Failed to download {url} after {max_retries} attempts.") async def download_deb_files_parallel(mirror, packages, tmp_dir): """Download .deb files in parallel.""" @@ -51,11 +58,11 @@ async def download_deb_files_parallel(mirror, packages, tmp_dir): if filename: url = f"{mirror}/{filename}" dest_path = os.path.join(tmp_dir, os.path.basename(filename)) - tasks.append(asyncio.create_task(download_file(session, url, dest_path))) + tasks.append(asyncio.create_task(download_file(session, url, dest_path, checksum=info.get("SHA256")))) await asyncio.gather(*tasks) -async def download_package_index_parallel(mirror, arch, suites): +async def download_package_index_parallel(mirror, arch, suites, check_sig, keyring): """Download package index files for specified suites and components entirely in memory.""" tasks = [] timeout = aiohttp.ClientTimeout(total=60) @@ -63,10 +70,9 @@ async def download_package_index_parallel(mirror, arch, suites): async with aiohttp.ClientSession(timeout=timeout) as session: for suite in suites: for component in ["main", "universe"]: - url = f"{mirror}/dists/{suite}/{component}/binary-{arch}/Packages.gz" - tasks.append(fetch_and_decompress(session, url)) + tasks.append(fetch_and_decompress(session, mirror, arch, suite, component, check_sig, keyring)) - results = await asyncio.gather(*tasks, return_exceptions=True) + results = await asyncio.gather(*tasks) merged_content = "" for result in results: @@ -77,20 +83,71 @@ async def download_package_index_parallel(mirror, arch, suites): return merged_content -async def fetch_and_decompress(session, url): +async def fetch_and_decompress(session, mirror, arch, suite, component, check_sig, keyring): """Fetch and decompress the Packages.gz file.""" - try: - async with session.get(url) as response: - if response.status == 200: - compressed_data = await response.read() 
- decompressed_data = gzip.decompress(compressed_data).decode('utf-8') - print(f"Downloaded index: {url}") - return decompressed_data - else: - print(f"Skipped index: {url} (doesn't exist)") - return None - except Exception as e: - print(f"Error fetching {url}: {e}") + + path = f"{component}/binary-{arch}/Packages.gz" + url = f"{mirror}/dists/{suite}/{path}" + + async with session.get(url) as response: + if response.status == 200: + compressed_data = await response.read() + decompressed_data = gzip.decompress(compressed_data).decode('utf-8') + print(f"Downloaded index: {url}") + + if check_sig: + # Verify the package index against the sha256 recorded in the Release file + release_file_content = await fetch_release_file(session, mirror, suite, keyring) + packages_sha = parse_release_file(release_file_content, path) + + sha256 = hashlib.sha256(compressed_data).hexdigest() + if sha256 != packages_sha: + raise Exception(f"SHA256 mismatch for {path}: expected {packages_sha}, got {sha256}") + print(f"Checksum verified for {path}") + + return decompressed_data + else: + print(f"Skipped index: {url} (doesn't exist)") + return None + +async def fetch_release_file(session, mirror, suite, keyring): + """Fetch Release and Release.gpg files and verify the signature.""" + + release_url = f"{mirror}/dists/{suite}/Release" + release_gpg_url = f"{mirror}/dists/{suite}/Release.gpg" + + with tempfile.NamedTemporaryFile() as release_file, tempfile.NamedTemporaryFile() as release_gpg_file: + await download_file(session, release_url, release_file.name) + await download_file(session, release_gpg_url, release_gpg_file.name) + + print("Verifying signature of Release with Release.gpg.") + verify_command = ["gpg"] + if keyring: + verify_command += ["--keyring", keyring] + verify_command += ["--verify", release_gpg_file.name, release_file.name] + result = subprocess.run(verify_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + if result.returncode != 0: + raise Exception(f"Signature 
verification failed: {result.stderr.decode('utf-8')}") + + print("Signature verified successfully.") + + with open(release_file.name) as f: + return f.read() + +def parse_release_file(content, path): + """Parses the Release file and returns sha256 checksum of the specified path.""" + + # data looks like this: + # + matches = re.findall(r'^ (\S*) +(\S*) +(\S*)$', content, re.MULTILINE) + + for entry in matches: + # the file has both md5 and sha256 checksums, we want sha256 which has a length of 64 + if entry[2] == path and len(entry[0]) == 64: + return entry[0] + + raise Exception(f"Could not find checksum for {path} in Release file.") def parse_debian_version(version): """Parse a Debian package version into epoch, upstream version, and revision.""" @@ -171,13 +228,15 @@ def parse_package_index(content): filename = fields.get("Filename") depends = fields.get("Depends") provides = fields.get("Provides", None) + sha256 = fields.get("SHA256") # Only update if package_name is not in packages or if the new version is higher if package_name not in packages or compare_debian_versions(version, packages[package_name]["Version"]) > 0: packages[package_name] = { "Version": version, "Filename": filename, - "Depends": depends + "Depends": depends, + "SHA256": sha256 } # Update aliases if package provides any alternatives @@ -233,7 +292,7 @@ def extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool): os.makedirs(extract_dir, exist_ok=True) with tempfile.TemporaryDirectory(dir=tmp_dir) as tmp_subdir: - result = subprocess.run(f"{ar_tool} t {os.path.abspath(deb_file)}", cwd=tmp_subdir, check=True, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + result = subprocess.run([ar_tool, "t", os.path.abspath(deb_file)], cwd=tmp_subdir, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) tar_filename = None for line in result.stdout.decode().splitlines(): @@ -247,7 +306,8 @@ def extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool): tar_file_path = 
os.path.join(tmp_subdir, tar_filename) print(f"Extracting {tar_filename} from {deb_file}..") - subprocess.run(f"{ar_tool} p {os.path.abspath(deb_file)} {tar_filename} > {tar_file_path}", check=True, shell=True) + with open(tar_file_path, "wb") as outfile: + subprocess.run([ar_tool, "p", os.path.abspath(deb_file), tar_filename], check=True, stdout=outfile, stderr=subprocess.PIPE) file_extension = os.path.splitext(tar_file_path)[1].lower() @@ -268,7 +328,7 @@ def extract_deb_file(deb_file, tmp_dir, extract_dir, ar_tool): raise ValueError(f"Unsupported compression format: {file_extension}") with tarfile.open(tar_file_path, mode) as tar: - tar.extractall(path=extract_dir, filter='fully_trusted') + tar.extractall(path=extract_dir, filter='tar') def finalize_setup(rootfsdir): lib_dir = os.path.join(rootfsdir, 'lib') @@ -295,24 +355,17 @@ def finalize_setup(rootfsdir): if __name__ == "__main__": parser = argparse.ArgumentParser(description="Generate rootfs for .NET runtime on Debian-like OS") - parser.add_argument("--distro", required=False, help="Distro name (e.g., debian, ubuntu, etc.)") parser.add_argument("--arch", required=True, help="Architecture (e.g., amd64, loong64, etc.)") parser.add_argument("--rootfsdir", required=True, help="Destination directory.") parser.add_argument('--suite', required=True, action='append', help='Specify one or more repository suites to collect index data.') - parser.add_argument("--mirror", required=False, help="Mirror (e.g., http://ftp.debian.org/debian-ports etc.)") + parser.add_argument("--mirror", required=True, help="Mirror (e.g., http://ftp.debian.org/debian-ports etc.)") parser.add_argument("--artool", required=False, default="ar", help="ar tool to extract debs (e.g., ar, llvm-ar etc.)") + parser.add_argument("--force-check-gpg", required=False, action='store_true', help="Verify the packages against signatures in Release file.") + parser.add_argument("--keyring", required=False, default='', help="Keyring file to check signature of 
Release file.") parser.add_argument("packages", nargs="+", help="List of package names to be installed.") args = parser.parse_args() - if args.mirror is None: - if args.distro == "ubuntu": - args.mirror = "http://archive.ubuntu.com/ubuntu" if args.arch in ["amd64", "i386"] else "http://ports.ubuntu.com/ubuntu-ports" - elif args.distro == "debian": - args.mirror = "http://ftp.debian.org/debian-ports" - else: - raise Exception("Unsupported distro") - DESIRED_PACKAGES = args.packages + [ # base packages "dpkg", "busybox", @@ -322,9 +375,16 @@ def finalize_setup(rootfsdir): "debianutils" ] - print(f"Creating rootfs. rootfsdir: {args.rootfsdir}, distro: {args.distro}, arch: {args.arch}, suites: {args.suite}, mirror: {args.mirror}") + print(f"Creating rootfs. rootfsdir: {args.rootfsdir}, arch: {args.arch}, suites: {args.suite}, mirror: {args.mirror}") + + check_sig = args.force_check_gpg + if check_sig and not args.keyring: + print("ERROR: --force-check-gpg requires --keyring to specify a keyring file for signature verification.") + print("Install the appropriate keyring package (e.g., debian-ports-archive-keyring, ubuntu-archive-keyring)") + print("or pass --skipsigcheck to build-rootfs.sh to disable signature checking.") + sys.exit(1) - package_index_content = asyncio.run(download_package_index_parallel(args.mirror, args.arch, args.suite)) + package_index_content = asyncio.run(download_package_index_parallel(args.mirror, args.arch, args.suite, check_sig, args.keyring)) packages_info, aliases = parse_package_index(package_index_content) diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1 index b5d13be7a..0e281df8c 100644 --- a/eng/common/tools.ps1 +++ b/eng/common/tools.ps1 @@ -628,11 +628,7 @@ function GetSdkTaskProject([string]$taskName) { if (Test-Path $proj) { return $proj } - # TODO: Remove this fallback once all supported versions use the new layout. 
- $legacyProj = Join-Path $toolsetDir "SdkTasks\$taskName.proj" - if (Test-Path $legacyProj) { - return $legacyProj - } + throw "Unable to find $taskName.proj in toolset at: $toolsetDir" } @@ -708,23 +704,14 @@ function InitializeToolset() { $packageDir = Join-Path $nugetCache (Join-Path 'microsoft.dotnet.arcade.sdk' $toolsetVersion) $packageToolsetDir = Join-Path $packageDir 'toolset' - $packageToolsDir = Join-Path $packageDir 'tools' - # TODO: Remove the tools/ check once all supported versions have the toolset folder. - if (!(Test-Path $packageToolsetDir) -and !(Test-Path $packageToolsDir)) { + if (!(Test-Path $packageToolsetDir)) { Write-PipelineTelemetryError -Category 'InitializeToolset' -Message "Arcade SDK package does not contain a toolset or tools folder: $packageDir" ExitWithExitCode 3 } New-Item -ItemType Directory -Path $toolsetToolsDir -Force | Out-Null - - # Copy toolset if present at the package root (new layout), otherwise fall back to tools - if (Test-Path $packageToolsetDir) { - Copy-Item -Path "$packageToolsetDir\*" -Destination $toolsetToolsDir -Recurse -Force - } else { - # TODO: Remove this fallback once all supported versions have the toolset folder. - Copy-Item -Path "$packageToolsDir\*" -Destination $toolsetToolsDir -Recurse -Force - } + Copy-Item -Path "$packageToolsetDir\*" -Destination $toolsetToolsDir -Recurse -Force if (Test-Path $buildProjPath) { $toolsetBuildProj = $buildProjPath diff --git a/eng/common/tools.sh b/eng/common/tools.sh index c0f0b13cb..5ff37cfb7 100755 --- a/eng/common/tools.sh +++ b/eng/common/tools.sh @@ -458,21 +458,13 @@ function InitializeToolset { local package_dir="$_GetNuGetPackageCachePath/microsoft.dotnet.arcade.sdk/$toolset_version" - # TODO: Remove the tools/ check once all supported versions have the toolset folder. - if [[ ! -d "$package_dir/toolset" && ! 
-d "$package_dir/tools" ]]; then - Write-PipelineTelemetryError -category 'InitializeToolset' "Arcade SDK package does not contain a toolset or tools folder: $package_dir" + if [[ ! -d "$package_dir/toolset" ]]; then + Write-PipelineTelemetryError -category 'InitializeToolset' "Arcade SDK package does not contain a toolset folder: $package_dir" ExitWithExitCode 3 fi mkdir -p "$toolset_tools_dir" - - # Copy toolset if present at the package root (new layout), otherwise fall back to tools - if [[ -d "$package_dir/toolset" ]]; then - cp -r "$package_dir/toolset/." "$toolset_tools_dir" - else - # TODO: Remove this fallback once all supported versions have the toolset folder. - cp -r "$package_dir/tools/." "$toolset_tools_dir" - fi + cp -r "$package_dir/toolset/." "$toolset_tools_dir" if [[ -a "$toolset_tools_dir/Build.proj" ]]; then toolset_build_proj="$toolset_tools_dir/Build.proj" @@ -629,12 +621,7 @@ function GetSdkTaskProject { echo "$proj" return fi - # TODO: Remove this fallback once all supported versions use the new layout. - local legacyProj="$toolsetDir/SdkTasks/$taskName.proj" - if [[ -a "$legacyProj" ]]; then - echo "$legacyProj" - return - fi + Write-PipelineTelemetryError -category 'Build' "Unable to find $taskName.proj in toolset at: $toolsetDir" ExitWithExitCode 3 } diff --git a/global.json b/global.json index 8db1e24ab..104902cc1 100644 --- a/global.json +++ b/global.json @@ -1,11 +1,11 @@ { "sdk": { - "version": "11.0.100-preview.4.26210.111", + "version": "11.0.100-preview.5.26227.104", "rollForward": "latestMajor", "allowPrerelease": true }, "tools": { - "dotnet": "11.0.100-preview.4.26210.111", + "dotnet": "11.0.100-preview.5.26227.104", "runtimes": { "dotnet": [ "9.0.11", @@ -14,6 +14,6 @@ } }, "msbuild-sdks": { - "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.26230.2" + "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.26263.2" } }