diff --git a/Makefile b/Makefile index af3d7086ec8e..bdf95f9dbf01 100644 --- a/Makefile +++ b/Makefile @@ -39,8 +39,9 @@ ifeq ($(NOBUSTER), 0) endif ifeq ($(NOBULLSEYE), 0) BLDENV=bullseye make -f Makefile.work $@ +else + make -f Makefile.work $@ endif - BLDENV=bullseye make -f Makefile.work docker-cleanup jessie: @echo "+++ Making $@ +++" @@ -84,7 +85,7 @@ $(PLATFORM_PATH): configure : $(PLATFORM_PATH) $(call make_work, $@) -clean reset showtag docker-cleanup sonic-slave-build sonic-slave-bash : +clean reset showtag sonic-slave-build sonic-slave-bash : $(call make_work, $@) # Freeze the versions, see more detail options: scripts/versions_manager.py freeze -h diff --git a/Makefile.work b/Makefile.work index 107134cf0979..8e6564ca5b7a 100644 --- a/Makefile.work +++ b/Makefile.work @@ -172,21 +172,40 @@ endif endif # Generate the version control build info +SONIC_VERSION_CACHE := $(filter-out none,$(SONIC_VERSION_CACHE_METHOD)) +SONIC_OVERRIDE_BUILD_VARS += SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) +SONIC_OVERRIDE_BUILD_VARS += SONIC_VERSION_CACHE_SOURCE=$(SONIC_VERSION_CACHE_SOURCE) +export SONIC_VERSION_CACHE SONIC_VERSION_CACHE_SOURCE +$(shell test -d $(SONIC_VERSION_CACHE_SOURCE) || \ + mkdir -p $(SONIC_VERSION_CACHE_SOURCE) && chmod -f 777 $(SONIC_VERSION_CACHE_SOURCE) 2>/dev/null ) $(shell SONIC_VERSION_CONTROL_COMPONENTS=$(SONIC_VERSION_CONTROL_COMPONENTS) \ TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) PACKAGE_URL_PREFIX=$(PACKAGE_URL_PREFIX) \ + DBGOPT='$(DBGOPT)' \ + DISTRO=$(BLDENV) \ + SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) \ + SONIC_VERSION_CACHE_SOURCE=$(SONIC_VERSION_CACHE_SOURCE) \ scripts/generate_buildinfo_config.sh) # Generate the slave Dockerfile, and prepare build info for it -$(shell CONFIGURED_ARCH=$(CONFIGURED_ARCH) MULTIARCH_QEMU_ENVIRON=$(MULTIARCH_QEMU_ENVIRON) CROSS_BUILD_ENVIRON=$(CROSS_BUILD_ENVIRON) ENABLE_FIPS_FEATURE=$(ENABLE_FIPS_FEATURE) DOCKER_EXTRA_OPTS=$(DOCKER_EXTRA_OPTS) DEFAULT_CONTAINER_REGISTRY=$(DEFAULT_CONTAINER_REGISTRY) j2 
$(SLAVE_DIR)/Dockerfile.j2 > $(SLAVE_DIR)/Dockerfile) -$(shell CONFIGURED_ARCH=$(CONFIGURED_ARCH) MULTIARCH_QEMU_ENVIRON=$(MULTIARCH_QEMU_ENVIRON) CROSS_BUILD_ENVIRON=$(CROSS_BUILD_ENVIRON) j2 $(SLAVE_DIR)/Dockerfile.user.j2 > $(SLAVE_DIR)/Dockerfile.user) -$(shell BUILD_SLAVE=y DEFAULT_CONTAINER_REGISTRY=$(DEFAULT_CONTAINER_REGISTRY) scripts/prepare_docker_buildinfo.sh $(SLAVE_BASE_IMAGE) $(SLAVE_DIR)/Dockerfile $(CONFIGURED_ARCH) "" $(BLDENV)) +$(shell CONFIGURED_ARCH=$(CONFIGURED_ARCH) MULTIARCH_QEMU_ENVIRON=$(MULTIARCH_QEMU_ENVIRON) \ + CROSS_BUILD_ENVIRON=$(CROSS_BUILD_ENVIRON) ENABLE_FIPS_FEATURE=$(ENABLE_FIPS_FEATURE) DOCKER_EXTRA_OPTS=$(DOCKER_EXTRA_OPTS) \ + DEFAULT_CONTAINER_REGISTRY=$(DEFAULT_CONTAINER_REGISTRY) j2 $(SLAVE_DIR)/Dockerfile.j2 > $(SLAVE_DIR)/Dockerfile) +$(shell CONFIGURED_ARCH=$(CONFIGURED_ARCH) MULTIARCH_QEMU_ENVIRON=$(MULTIARCH_QEMU_ENVIRON) \ + CROSS_BUILD_ENVIRON=$(CROSS_BUILD_ENVIRON) j2 $(SLAVE_DIR)/Dockerfile.user.j2 > $(SLAVE_DIR)/Dockerfile.user) +PREPARE_DOCKER=DEFAULT_CONTAINER_REGISTRY=$(DEFAULT_CONTAINER_REGISTRY) BUILD_SLAVE=y DBGOPT='$(DBGOPT)' SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) \ + scripts/prepare_docker_buildinfo.sh $(SLAVE_BASE_IMAGE) $(SLAVE_DIR)/Dockerfile $(CONFIGURED_ARCH) "" $(BLDENV) + +$(shell $(PREPARE_DOCKER) ) # Add the versions in the tag, if the version change, need to rebuild the slave -SLAVE_BASE_TAG = $(shell cat $(SLAVE_DIR)/Dockerfile $(SLAVE_DIR)/buildinfo/versions/versions-* src/sonic-build-hooks/hooks/* | sha1sum | awk '{print substr($$1,0,11);}') +SLAVE_BASE_TAG = $(shell cat $(SLAVE_DIR)/Dockerfile $(SLAVE_DIR)/buildinfo/versions/versions-* src/sonic-build-hooks/hooks/* 2>/dev/null | sha1sum | awk '{print substr($$1,0,11);}') # Calculate the slave TAG based on $(USER)/$(PWD)/$(CONFIGURED_PLATFORM) to get unique SHA ID -SLAVE_TAG = $(shell (cat $(SLAVE_DIR)/Dockerfile.user $(SLAVE_DIR)/Dockerfile $(SLAVE_DIR)/buildinfo/versions/versions-* .git/HEAD && echo 
$(USER)/$(PWD)/$(CONFIGURED_PLATFORM)) \ +SLAVE_TAG = $(shell (cat $(SLAVE_DIR)/Dockerfile.user $(SLAVE_DIR)/Dockerfile $(SLAVE_DIR)/buildinfo/versions/versions-* .git/HEAD 2>/dev/null && echo $(USER)/$(PWD)/$(CONFIGURED_PLATFORM)) \ | sha1sum | awk '{print substr($$1,0,11);}') +COLLECT_DOCKER=DEFAULT_CONTAINER_REGISTRY=$(DEFAULT_CONTAINER_REGISTRY) DBGOPT='$(DBGOPT)' SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) \ + scripts/collect_docker_version_files.sh $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) target $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) $(SLAVE_DIR) $(SLAVE_DIR)/Dockerfile + OVERLAY_MODULE_CHECK := \ lsmod | grep -q "^overlay " &>/dev/null || \ zgrep -q 'CONFIG_OVERLAY_FS=y' /proc/config.gz &>/dev/null || \ @@ -240,6 +259,10 @@ ifneq ($(SONIC_DPKG_CACHE_SOURCE),) DOCKER_RUN += -v "$(SONIC_DPKG_CACHE_SOURCE):/dpkg_cache:rw" endif +ifneq ($(SONIC_VERSION_CACHE_SOURCE),) + DOCKER_RUN += -v "$(SONIC_VERSION_CACHE_SOURCE):/vcache:rw" +endif + ifeq ($(SONIC_ENABLE_SECUREBOOT_SIGNATURE), y) ifneq ($(SIGNING_KEY),) DOCKER_SIGNING_SOURCE := $(shell dirname $(SIGNING_KEY)) @@ -328,26 +351,65 @@ DOCKER_BASE_LOG = $(SLAVE_DIR)/$(SLAVE_BASE_IMAGE)_$(SLAVE_BASE_TAG).log DOCKER_LOG = $(SLAVE_DIR)/$(SLAVE_IMAGE)_$(SLAVE_TAG).log -DOCKER_BASE_BUILD = docker build --no-cache \ +DOCKER_SLAVE_BASE_BUILD = docker build --no-cache \ -t $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) \ --build-arg http_proxy=$(http_proxy) \ --build-arg https_proxy=$(https_proxy) \ --build-arg no_proxy=$(no_proxy) \ - $(SLAVE_DIR) $(SPLIT_LOG) $(DOCKER_BASE_LOG) + --build-arg SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) \ + --build-arg SONIC_VERSION_CONTROL_COMPONENTS=$(SONIC_VERSION_CONTROL_COMPONENTS) \ + $(SLAVE_DIR) \ + | awk '/^_VCSTART_$$/,/^_VCEND_$$/{if($$0 !~ /_VCSTART_|_VCEND_/)print >"$(SLAVE_BASE_IMAGE)/vcache/cache.base64";next}{print }' \ + $(SPLIT_LOG) $(DOCKER_BASE_LOG) + DOCKER_BASE_PULL = docker pull \ $(REGISTRY_SERVER):$(REGISTRY_PORT)/$(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) -DOCKER_BUILD = docker 
build --no-cache \ +DOCKER_USER_BUILD = docker build --no-cache \ --build-arg user=$(USER) \ --build-arg uid=$(shell id -u) \ --build-arg guid=$(shell id -g) \ --build-arg hostname=$(shell echo $$HOSTNAME) \ - --build-arg slave_base_tag_ref=$(SLAVE_BASE_TAG) \ + --build-arg slave_base_tag_ref=$(SLAVE_BASE_TAG) \ -t $(SLAVE_IMAGE):$(SLAVE_TAG) \ -f $(SLAVE_DIR)/Dockerfile.user \ $(SLAVE_DIR) $(SPLIT_LOG) $(DOCKER_LOG) + +DOCKER_SLAVE_BASE_BUILD_REGISTRY = \ + { \ + [ $(ENABLE_DOCKER_BASE_PULL) == y ] && { echo Image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) not found. Pulling...; } && \ + $(DOCKER_BASE_PULL) && \ + { \ + docker tag $(REGISTRY_SERVER):$(REGISTRY_PORT)/$(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) && \ + $(COLLECT_DOCKER) ; \ + } \ + } || \ + { \ + echo Image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) not found in registry. Building... ; \ + $(PREPARE_DOCKER) ; \ + $(DOCKER_SLAVE_BASE_BUILD) ; \ + $(COLLECT_DOCKER) ; \ + } + +SONIC_SLAVE_BASE_BUILD = \ + echo Checking sonic-slave-base image: $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG); \ + docker inspect --type image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) &> /dev/null || \ + { \ + echo Image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) not found. Building... ; \ + $(DOCKER_SLAVE_BASE_BUILD_REGISTRY) ; \ + } + +SONIC_SLAVE_USER_BUILD = \ + echo Checking sonic-slave image: $(SLAVE_IMAGE):$(SLAVE_TAG); \ + docker inspect --type image $(SLAVE_IMAGE):$(SLAVE_TAG) &> /dev/null || \ + { \ + echo Image $(SLAVE_IMAGE):$(SLAVE_TAG) not found. Building... ; \ + $(DOCKER_USER_BUILD) ; \ + } + + SONIC_BUILD_INSTRUCTION := make \ -f slave.mk \ PLATFORM=$(PLATFORM) \ @@ -412,9 +474,16 @@ SONIC_BUILD_INSTRUCTION := make \ .PHONY: sonic-slave-build sonic-slave-bash init reset + +COLLECT_BUILD_VERSION = DBGOPT='$(DBGOPT)' scripts/collect_build_version_files.sh $$? 
+ifneq ($(filter clean,$(MAKECMDGOALS)),) +COLLECT_BUILD_VERSION="echo " +endif + + .DEFAULT_GOAL := all -%:: +%::|sonic-build-hooks ifneq ($(filter y, $(MULTIARCH_QEMU_ENVIRON) $(CROSS_BUILD_ENVIRON)),) @$(DOCKER_MULTIARCH_CHECK) ifneq ($(BLDENV), ) @@ -423,57 +492,38 @@ ifneq ($(BLDENV), ) endif endif @$(OVERLAY_MODULE_CHECK) + @$(SONIC_SLAVE_BASE_BUILD) + @$(SONIC_SLAVE_USER_BUILD) - @pushd src/sonic-build-hooks; TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) make all; popd - @cp src/sonic-build-hooks/buildinfo/sonic-build-hooks* $(SLAVE_DIR)/buildinfo - @docker inspect --type image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) &> /dev/null || \ - { [ $(ENABLE_DOCKER_BASE_PULL) == y ] && { echo Image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) not found. Pulling...; } && \ - $(DOCKER_BASE_PULL) && \ - { docker tag $(REGISTRY_SERVER):$(REGISTRY_PORT)/$(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) && \ - scripts/collect_docker_version_files.sh $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) target ; } } || \ - { echo Image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) not found. Building... ; \ - $(DOCKER_BASE_BUILD) ; \ - scripts/collect_docker_version_files.sh $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) target ; } - @docker inspect --type image $(SLAVE_IMAGE):$(SLAVE_TAG) &> /dev/null || \ - { echo Image $(SLAVE_IMAGE):$(SLAVE_TAG) not found. Building... 
; \ - $(DOCKER_BUILD) ; } ifeq "$(KEEP_SLAVE_ON)" "yes" ifdef SOURCE_FOLDER - @$(DOCKER_RUN) -v $(SOURCE_FOLDER):/var/$(USER)/src $(SLAVE_IMAGE):$(SLAVE_TAG) bash -c "$(SONIC_BUILD_INSTRUCTION) $@; scripts/collect_build_version_files.sh \$$?; /bin/bash" + @$(DOCKER_RUN) -v $(SOURCE_FOLDER):/var/$(USER)/src $(SLAVE_IMAGE):$(SLAVE_TAG) bash -c "$(SONIC_BUILD_INSTRUCTION) $@; $(COLLECT_BUILD_VERSION); /bin/bash" else - @$(DOCKER_RUN) $(SLAVE_IMAGE):$(SLAVE_TAG) bash -c "$(SONIC_BUILD_INSTRUCTION) $@; scripts/collect_build_version_files.sh \$$?; /bin/bash" + @$(DOCKER_RUN) $(SLAVE_IMAGE):$(SLAVE_TAG) bash -c "$(SONIC_BUILD_INSTRUCTION) $@; $(COLLECT_BUILD_VERSION); /bin/bash" endif else - @$(DOCKER_RUN) $(SLAVE_IMAGE):$(SLAVE_TAG) bash -c "$(SONIC_BUILD_INSTRUCTION) $@; scripts/collect_build_version_files.sh \$$?" + @$(DOCKER_RUN) $(SLAVE_IMAGE):$(SLAVE_TAG) bash -c "$(SONIC_BUILD_INSTRUCTION) $@; $(COLLECT_BUILD_VERSION);" + @$(docker-image-cleanup) endif docker-cleanup: - $(docker-image-cleanup) + @$(docker-image-cleanup) +.PHONY: sonic-build-hooks sonic-build-hooks: @pushd src/sonic-build-hooks; TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) make all; popd + @mkdir -p $(SLAVE_DIR)/buildinfo @cp src/sonic-build-hooks/buildinfo/sonic-build-hooks* $(SLAVE_DIR)/buildinfo -sonic-slave-base-build : sonic-build-hooks +sonic-slave-base-build : |sonic-build-hooks ifeq ($(MULTIARCH_QEMU_ENVIRON), y) @$(DOCKER_MULTIARCH_CHECK) endif @$(OVERLAY_MODULE_CHECK) - @echo Checking sonic-slave-base image: $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) - @docker inspect --type image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) &> /dev/null || \ - { [ $(ENABLE_DOCKER_BASE_PULL) == y ] && { echo Image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) not found. 
Pulling...; } && \ - $(DOCKER_BASE_PULL) && \ - { docker tag $(REGISTRY_SERVER):$(REGISTRY_PORT)/$(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) && \ - scripts/collect_docker_version_files.sh $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) target ; } } || \ - { echo Image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) not found. Building... ; \ - $(DOCKER_BASE_BUILD) ; \ - scripts/collect_docker_version_files.sh $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) target ; } + @$(SONIC_SLAVE_BASE_BUILD) sonic-slave-build : sonic-slave-base-build - @echo Checking sonic-slave image: $(SLAVE_IMAGE):$(SLAVE_TAG) - @docker inspect --type image $(SLAVE_IMAGE):$(SLAVE_TAG) &> /dev/null || \ - { echo Image $(SLAVE_IMAGE):$(SLAVE_TAG) not found. Building... ; \ - $(DOCKER_BUILD) ; } + @$(SONIC_SLAVE_USER_BUILD) sonic-slave-bash : sonic-slave-build @$(DOCKER_RUN) -t $(SLAVE_IMAGE):$(SLAVE_TAG) bash diff --git a/build_debian.sh b/build_debian.sh index 1dd77f3bee6e..5f237e14798a 100755 --- a/build_debian.sh +++ b/build_debian.sh @@ -81,7 +81,9 @@ echo '[INFO] Build host debian base system...' 
TARGET_PATH=$TARGET_PATH scripts/build_debian_base_system.sh $CONFIGURED_ARCH $IMAGE_DISTRO $FILESYSTEM_ROOT # Prepare buildinfo -sudo scripts/prepare_debian_image_buildinfo.sh $CONFIGURED_ARCH $IMAGE_DISTRO $FILESYSTEM_ROOT $http_proxy +sudo DBGOPT="${DBGOPT}" SONIC_VERSION_CACHE=${SONIC_VERSION_CACHE} \ + scripts/prepare_debian_image_buildinfo.sh $CONFIGURED_ARCH $IMAGE_DISTRO $FILESYSTEM_ROOT $http_proxy + sudo chown root:root $FILESYSTEM_ROOT @@ -111,8 +113,28 @@ sudo LANG=C chroot $FILESYSTEM_ROOT mount sudo cp files/apt/sources.list.$CONFIGURED_ARCH $FILESYSTEM_ROOT/etc/apt/sources.list sudo cp files/apt/apt.conf.d/{81norecommends,apt-{clean,gzip-indexes,no-languages},no-check-valid-until} $FILESYSTEM_ROOT/etc/apt/apt.conf.d/ +sudo LANG=C chroot $FILESYSTEM_ROOT apt-get -y install apt-transport-https \ + ca-certificates \ + curl \ + gnupg2 \ + software-properties-common +if [[ $CONFIGURED_ARCH == armhf ]]; then + # update ssl ca certificates for secure pem + sudo https_proxy=$https_proxy LANG=C chroot $FILESYSTEM_ROOT c_rehash +fi +sudo https_proxy=$https_proxy LANG=C chroot $FILESYSTEM_ROOT curl -o /tmp/docker.asc -fsSL https://download.docker.com/linux/debian/gpg +sudo LANG=C chroot $FILESYSTEM_ROOT mv /tmp/docker.asc /etc/apt/trusted.gpg.d/ +sudo LANG=C chroot $FILESYSTEM_ROOT add-apt-repository \ + "deb [arch=$CONFIGURED_ARCH] https://download.docker.com/linux/debian $IMAGE_DISTRO stable" +sudo LANG=C chroot $FILESYSTEM_ROOT apt-get update +sudo LANG=C chroot $FILESYSTEM_ROOT apt-get -y install docker-ce=${DOCKER_VERSION} docker-ce-cli=${DOCKER_VERSION} containerd.io=${CONTAINERD_IO_VERSION} + +# Uninstall 'python3-gi' installed as part of 'software-properties-common' to remove debian version of 'PyGObject' +# pip version of 'PyGObject' will be installed during installation of 'sonic-host-services' +sudo LANG=C chroot $FILESYSTEM_ROOT apt-get -y remove software-properties-common gnupg2 python3-gi + ## Note: set lang to prevent locale warnings in your 
chroot -sudo LANG=C chroot $FILESYSTEM_ROOT apt-get -y update +sudo LANG=C chroot $FILESYSTEM_ROOT apt-get update sudo LANG=C chroot $FILESYSTEM_ROOT apt-get -y upgrade echo '[INFO] Install and setup eatmydata' @@ -233,25 +255,6 @@ echo '[INFO] Install docker' ## Otherwise Docker will fail to start sudo LANG=C chroot $FILESYSTEM_ROOT apt-get -y install apparmor sudo cp files/image_config/ntp/ntp-apparmor $FILESYSTEM_ROOT/etc/apparmor.d/local/usr.sbin.ntpd -sudo LANG=C chroot $FILESYSTEM_ROOT apt-get -y install apt-transport-https \ - ca-certificates \ - curl \ - gnupg2 \ - software-properties-common -if [[ $CONFIGURED_ARCH == armhf ]]; then - # update ssl ca certificates for secure pem - sudo https_proxy=$https_proxy LANG=C chroot $FILESYSTEM_ROOT c_rehash -fi -sudo https_proxy=$https_proxy LANG=C chroot $FILESYSTEM_ROOT curl -o /tmp/docker.asc -fsSL https://download.docker.com/linux/debian/gpg -sudo LANG=C chroot $FILESYSTEM_ROOT mv /tmp/docker.asc /etc/apt/trusted.gpg.d/ -sudo LANG=C chroot $FILESYSTEM_ROOT add-apt-repository \ - "deb [arch=$CONFIGURED_ARCH] https://download.docker.com/linux/debian $IMAGE_DISTRO stable" -sudo LANG=C chroot $FILESYSTEM_ROOT apt-get update -sudo LANG=C chroot $FILESYSTEM_ROOT apt-get -y install docker-ce=${DOCKER_VERSION} docker-ce-cli=${DOCKER_VERSION} containerd.io=${CONTAINERD_IO_VERSION} - -# Uninstall 'python3-gi' installed as part of 'software-properties-common' to remove debian version of 'PyGObject' -# pip version of 'PyGObject' will be installed during installation of 'sonic-host-services' -sudo LANG=C chroot $FILESYSTEM_ROOT apt-get -y remove software-properties-common gnupg2 python3-gi if [ "$INCLUDE_KUBERNETES" == "y" ] then @@ -403,10 +406,10 @@ if [[ $TARGET_BOOTLOADER == grub ]]; then GRUB_PKG=grub-efi-arm64-bin fi - sudo LANG=C DEBIAN_FRONTEND=noninteractive chroot $FILESYSTEM_ROOT apt-get -y download \ + sudo LANG=C DEBIAN_FRONTEND=noninteractive chroot $FILESYSTEM_ROOT apt-get install -d -o 
dir::cache=/var/cache/apt \ $GRUB_PKG - sudo mv $FILESYSTEM_ROOT/grub*.deb $FILESYSTEM_ROOT/$PLATFORM_DIR/grub + sudo cp $FILESYSTEM_ROOT/var/cache/apt/archives/grub*.deb $FILESYSTEM_ROOT/$PLATFORM_DIR/grub fi ## Disable kexec supported reboot which was installed by default @@ -596,7 +599,8 @@ if [[ $TARGET_BOOTLOADER == uboot ]]; then fi # Collect host image version files before cleanup -scripts/collect_host_image_version_files.sh $TARGET_PATH $FILESYSTEM_ROOT +DBGOPT="${DBGOPT}" SONIC_VERSION_CACHE=${SONIC_VERSION_CACHE} \ + scripts/collect_host_image_version_files.sh $CONFIGURED_ARCH $IMAGE_DISTRO $TARGET_PATH $FILESYSTEM_ROOT # Remove GCC sudo LANG=C DEBIAN_FRONTEND=noninteractive chroot $FILESYSTEM_ROOT apt-get -y remove gcc @@ -615,7 +619,7 @@ sudo LANG=C chroot $FILESYSTEM_ROOT bash -c 'rm -rf /usr/share/doc/* /usr/share/ [ -n "$http_proxy" ] && sudo rm -f $FILESYSTEM_ROOT/etc/apt/apt.conf.d/01proxy ## Clean up pip cache -sudo LANG=C chroot $FILESYSTEM_ROOT pip3 cache purge +sudo LANG=C chroot $FILESYSTEM_ROOT pip3 cache purge || true ## Umount all echo '[INFO] Umount all' diff --git a/dockers/dockerfile-macros.j2 b/dockers/dockerfile-macros.j2 index 9917cb17aba2..993e3fad677b 100644 --- a/dockers/dockerfile-macros.j2 +++ b/dockers/dockerfile-macros.j2 @@ -30,6 +30,10 @@ RUN cd /python-wheels/ && pip3 install {{ packages | join(' ') }} {%- endif %} {%- endmacro %} +{% macro install_python3_wheels(packages) -%} +RUN cd /python-wheels/ && python3 -m pip install {{ packages | join(' ') }} +{%- endmacro %} + {% macro copy_files(prefix, files, dest) -%} COPY \ {%- for file in files %} diff --git a/platform/broadcom/sai.mk b/platform/broadcom/sai.mk index fb6c21e585ba..d3c85d8a243d 100644 --- a/platform/broadcom/sai.mk +++ b/platform/broadcom/sai.mk @@ -17,4 +17,9 @@ $(BRCM_DNX_SAI)_URL = "$(LIBSAIBCM_DNX_URL_PREFIX)/$(BRCM_DNX_SAI)" SONIC_ONLINE_DEBS += $(BRCM_XGS_SAI) SONIC_ONLINE_DEBS += $(BRCM_DNX_SAI) $(BRCM_XGS_SAI_DEV)_DEPENDS += $(BRCM_XGS_SAI) + 
+$(BRCM_XGS_SAI)_SKIP_VERSION=y +$(BRCM_XGS_SAI_DEV)_SKIP_VERSION=y +$(BRCM_DNX_SAI)_SKIP_VERSION=y + $(eval $(call add_conflict_package,$(BRCM_XGS_SAI_DEV),$(LIBSAIVS_DEV))) diff --git a/rules/config b/rules/config index 23622bcc8740..9095d2ec9dc8 100644 --- a/rules/config +++ b/rules/config @@ -210,6 +210,15 @@ TRUSTED_GPG_URLS = https://packages.trafficmanager.net/debian/public_key.gpg,htt # docker: docker base images SONIC_VERSION_CONTROL_COMPONENTS ?= none +# SONIC_VERSION_CACHE allows the .deb,.py, wget, git, docker and go files to be stored in the cache path. This allows the submodules to +# cache standard installation package and restored back to avoid the package download every time. +# SONIC_VERSION_CACHE - Method of deb package caching +# none : no caching +# cache : Use cache if exists else build the source and update the cache +# SONIC_VERSION_CACHE_SOURCE - Defines the version cache location details +SONIC_VERSION_CACHE_METHOD ?= none +SONIC_VERSION_CACHE_SOURCE ?= $(SONIC_DPKG_CACHE_SOURCE)/vcache + # SONiC docker registry # # Set the env variable ENABLE_DOCKER_BASE_PULL = y to enable pulling sonic-slave docker from registry diff --git a/rules/functions b/rules/functions index 62b3c8a98375..f92b51bb6d6d 100644 --- a/rules/functions +++ b/rules/functions @@ -48,6 +48,7 @@ log_green = echo -e "$(GREEN)$(1)$(GRAY)" FLUSH_LOG = rm -f $@.log LOG_SIMPLE = &>> $(PROJECT_ROOT)/$@.log || { [ $$? -eq 0 ] || pushd $(PROJECT_ROOT) > /dev/null ; ./update_screen.sh -e $@ ; popd > /dev/null ; false ; } +LOG_SAVE = |& $(PROJECT_ROOT)/scripts/process_log.sh $(PROCESS_LOG_OPTION) &>> $(PROJECT_ROOT)/$@.log ; test $${PIPESTATUS[-2]} -eq 0 || { [ $$? -eq 0 ] || pushd $(PROJECT_ROOT) > /dev/null ; ./update_screen.sh -e $@ ; popd > /dev/null ; false ; } LOG = < /dev/null |& $(PROJECT_ROOT)/scripts/process_log.sh $(PROCESS_LOG_OPTION) &>> $(PROJECT_ROOT)/$@.log ; test $${PIPESTATUS[-2]} -eq 0 || { [ $$? 
-eq 0 ] || pushd $(PROJECT_ROOT) > /dev/null ; ./update_screen.sh -e $@ ; popd > /dev/null ; false ; } ############################################################################### diff --git a/scripts/build_debian_base_system.sh b/scripts/build_debian_base_system.sh index 1532befdbe58..f305d89c8fc4 100755 --- a/scripts/build_debian_base_system.sh +++ b/scripts/build_debian_base_system.sh @@ -1,5 +1,7 @@ #!/bin/bash +set -x + CONFIGURED_ARCH=$1 IMAGE_DISTRO=$2 FILESYSTEM_ROOT=$3 @@ -62,48 +64,71 @@ if [ -z "$PACKAGES" ]; then fi rm -rf $TEMP_DIR +BASE_VERSIONS=files/build/versions/host-base-image/versions-deb-${IMAGE_DISTRO}-${CONFIGURED_ARCH} BASEIMAGE_TARBALLPATH=$TARGET/baseimage BASEIMAGE_TARBALL=$(realpath -e $TARGET)/baseimage.tgz -rm -rf $BASEIMAGE_TARBALLPATH $BASEIMAGE_TARBALL - -ARCHIEVES=$BASEIMAGE_TARBALLPATH/var/cache/apt/archives -APTLIST=$BASEIMAGE_TARBALLPATH/var/lib/apt/lists -TARGET_DEBOOTSTRAP=$BASEIMAGE_TARBALLPATH/debootstrap -APTDEBIAN="$APTLIST/deb.debian.org_debian_dists_${DISTRO}_main_binary-${CONFIGURED_ARCH}_Packages" -DEBPATHS=$TARGET_DEBOOTSTRAP/debpaths -DEBOOTSTRAP_BASE=$TARGET_DEBOOTSTRAP/base -DEBOOTSTRAP_REQUIRED=$TARGET_DEBOOTSTRAP/required -[ -d $BASEIMAGE_TARBALLPATH ] && rm -rf $BASEIMAGE_TARBALLPATH -mkdir -p $ARCHIEVES -mkdir -p $APTLIST -mkdir -p $TARGET_DEBOOTSTRAP -URL_ARR=$(apt-get download --print-uris $PACKAGES | cut -d" " -f1 | tr -d "'") -PACKAGE_ARR=($PACKAGES) -LENGTH=${#PACKAGE_ARR[@]} -for ((i=0;i&2 - exit 1 - fi - filename=$(basename "$url") - SKIP_BUILD_HOOK=y wget $url -O $ARCHIEVES/$filename - echo $packagename >> $DEBOOTSTRAP_REQUIRED - echo "$packagename /var/cache/apt/archives/$filename" >> $DEBPATHS -done -touch $APTDEBIAN -touch $DEBOOTSTRAP_BASE -(cd $BASEIMAGE_TARBALLPATH && fakeroot tar -zcf $BASEIMAGE_TARBALL .) 
- -sudo debootstrap --verbose --variant=minbase --arch $CONFIGURED_ARCH --unpack-tarball=$BASEIMAGE_TARBALL $IMAGE_DISTRO $FILESYSTEM_ROOT +BASE_IMG_CACHE_PATH=/vcache/baseimage/ +mkdir -p ${BASE_IMG_CACHE_PATH} && chmod -f -R 777 ${BASE_IMG_CACHE_PATH} +BASE_IMG_SHA="$((echo ${IMAGE_DISTRO}/${CONFIGURED_ARCH};cat ${BASE_VERSIONS} ) | sha1sum | awk '{print substr($1,0,23);}')" +BASE_IMG_CACHE_FILE=${BASE_IMG_CACHE_PATH}/baseimage-${BASE_IMG_SHA}.tgz + +if [[ ! -z "$(get_version_cache_option)" && -f ${BASE_IMG_CACHE_FILE} ]]; then + # Load from version cache + cp ${BASE_IMG_CACHE_FILE} ${BASEIMAGE_TARBALL} + touch ${BASE_IMG_CACHE_FILE} +else + rm -rf $BASEIMAGE_TARBALLPATH $BASEIMAGE_TARBALL + + ARCHIEVES=$BASEIMAGE_TARBALLPATH/var/cache/apt/archives + APTLIST=$BASEIMAGE_TARBALLPATH/var/lib/apt/lists + TARGET_DEBOOTSTRAP=$BASEIMAGE_TARBALLPATH/debootstrap + APTDEBIAN="$APTLIST/deb.debian.org_debian_dists_${DISTRO}_main_binary-${CONFIGURED_ARCH}_Packages" + DEBPATHS=$TARGET_DEBOOTSTRAP/debpaths + DEBOOTSTRAP_BASE=$TARGET_DEBOOTSTRAP/base + DEBOOTSTRAP_REQUIRED=$TARGET_DEBOOTSTRAP/required + [ -d $BASEIMAGE_TARBALLPATH ] && rm -rf $BASEIMAGE_TARBALLPATH + mkdir -p $ARCHIEVES + mkdir -p $APTLIST + mkdir -p $TARGET_DEBOOTSTRAP + #PACKAGES=$(sed -E 's/=(=[^=]*)$/\1/' $BASE_VERSIONS) + #URL_ARR=$(/usr/bin/apt-get download --print-uris $PACKAGES | cut -d" " -f1 | tr -d "'") + URL_ARR=$(apt-get download --print-uris $PACKAGES | cut -d" " -f1 | tr -d "'") + PACKAGE_ARR=($PACKAGES) + LENGTH=${#PACKAGE_ARR[@]} + for ((i=0;i&2 + exit 1 + fi + filename=$(basename "$url") + SKIP_BUILD_HOOK=y wget $url -O $ARCHIEVES/$filename + echo $packagename >> $DEBOOTSTRAP_REQUIRED + echo "$packagename /var/cache/apt/archives/$filename" >> $DEBPATHS + done + touch $APTDEBIAN + touch $DEBOOTSTRAP_BASE + (cd $BASEIMAGE_TARBALLPATH && fakeroot tar -zcf $BASEIMAGE_TARBALL .) + + # Save it into version cache + if [[ ! 
-z "$(get_version_cache_option)" ]]; then + FLOCK ${BASE_IMG_CACHE_FILE} + cp ${BASEIMAGE_TARBALL} ${BASE_IMG_CACHE_FILE} + chmod -f 777 ${BASE_IMG_CACHE_FILE} + FUNLOCK ${BASE_IMG_CACHE_FILE} + fi +fi + +sudo SKIP_BUILD_HOOK=y debootstrap --verbose --variant=minbase --arch $CONFIGURED_ARCH --unpack-tarball=$BASEIMAGE_TARBALL $IMAGE_DISTRO $FILESYSTEM_ROOT RET=$? if [ $RET -ne 0 ]; then exit $RET fi +sudo rm -rf $FILESYSTEM_ROOT/var/lib/apt/lists generate_version_file diff --git a/scripts/collect_build_version_files.sh b/scripts/collect_build_version_files.sh index b650e421996b..6b38850b197b 100755 --- a/scripts/collect_build_version_files.sh +++ b/scripts/collect_build_version_files.sh @@ -1,5 +1,7 @@ #!/bin/bash +[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x + RET=$1 BLDENV=$2 TARGET_PATH=$3 @@ -13,16 +15,21 @@ TIMESTAMP=$(date +"%Y%m%d%H%M%S") [ -z "$TARGET_PATH" ] && TARGET_PATH=./target VERSION_BUILD_PATH=$TARGET_PATH/versions/build -VERSION_SLAVE_PATH=$VERSION_BUILD_PATH/build-sonic-slave-${BLDENV} +VERSION_SLAVE_PATH=$VERSION_BUILD_PATH/sonic-slave-${BLDENV} LOG_VERSION_PATH=$VERSION_BUILD_PATH/log-${TIMESTAMP} +DEFAULT_VERSION_PATH=files/build/versions/default +BUILD_LOG_PATH=/sonic/target/versions/log/sonic-slave-${BLDENV}/ sudo chmod -R a+rw $BUILDINFO_PATH collect_version_files $LOG_VERSION_PATH ([ -d $BUILD_VERSION_PATH ] && [ ! -z "$(ls $BUILD_VERSION_PATH/)" ]) && cp -rf $BUILD_VERSION_PATH/* $LOG_VERSION_PATH/ mkdir -p $VERSION_SLAVE_PATH +mkdir -p ${BUILD_LOG_PATH} +([ -d ${LOG_PATH} ] && [ ! 
-z "$(ls ${LOG_PATH})" ]) && cp ${LOG_PATH}/* ${BUILD_LOG_PATH} +#scripts/versions_manager.py merge -t $VERSION_SLAVE_PATH -b $LOG_VERSION_PATH -e $POST_VERSION_PATH -i ${DEFAULT_VERSION_PATH} scripts/versions_manager.py merge -t $VERSION_SLAVE_PATH -b $LOG_VERSION_PATH -e $POST_VERSION_PATH -rm -rf $BUILD_VERSION_PATH/* +[ -d $BUILD_VERSION_PATH ] && rm -rf $BUILD_VERSION_PATH/* exit $RET diff --git a/scripts/collect_docker_version_files.sh b/scripts/collect_docker_version_files.sh index 73f0a9b53198..94f819fd0bf4 100755 --- a/scripts/collect_docker_version_files.sh +++ b/scripts/collect_docker_version_files.sh @@ -1,13 +1,26 @@ #!/bin/bash - + +[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x + +if [[ -e /usr/local/share/buildinfo/scripts/buildinfo_base.sh ]];then + set -x +fi + DOCKER_IMAGE=$1 TARGET_PATH=$2 +DOCKER_IMAGE_TAG=$3 +DOCKER_PATH=$4 +DOCKER_FILE=$5 + +. scripts/utils.sh [ -z "$TARGET_PATH" ] && TARGET_PATH=./target DOCKER_IMAGE_NAME=$(echo $DOCKER_IMAGE | cut -d: -f1) DOCKER_CONTAINER=$DOCKER_IMAGE_NAME TARGET_VERSIONS_PATH=$TARGET_PATH/versions/dockers/$DOCKER_IMAGE_NAME +BUILD_LOG_PATH=target/versions/log/$DOCKER_IMAGE_NAME +mkdir -p ${BUILD_LOG_PATH} [ -d $TARGET_VERSIONS_PATH ] && rm -rf $TARGET_VERSIONS_PATH mkdir -p $TARGET_VERSIONS_PATH @@ -18,8 +31,91 @@ export DOCKER_CLI_EXPERIMENTAL=enabled if docker container inspect $DOCKER_IMAGE > /dev/null 2>&1; then docker container rm $DOCKER_IMAGE > /dev/null fi -docker create --name $DOCKER_CONTAINER --entrypoint /bin/bash $DOCKER_IMAGE +docker create --name $DOCKER_CONTAINER --entrypoint /bin/bash $DOCKER_IMAGE_TAG docker cp -L $DOCKER_CONTAINER:/etc/os-release $TARGET_VERSIONS_PATH/ docker cp -L $DOCKER_CONTAINER:/usr/local/share/buildinfo/pre-versions $TARGET_VERSIONS_PATH/ docker cp -L $DOCKER_CONTAINER:/usr/local/share/buildinfo/post-versions $TARGET_VERSIONS_PATH/ +docker cp -L $DOCKER_CONTAINER:/usr/local/share/buildinfo/log ${BUILD_LOG_PATH}/ + + + + +# Version package cache 
+IMAGE_DBGS_NAME=${DOCKER_IMAGE_NAME//-/_}_image_dbgs +if [[ ${DOCKER_IMAGE_NAME} == sonic-slave-* ]]; then + GLOBAL_CACHE_DIR=${SONIC_VERSION_CACHE_SOURCE}/${DOCKER_IMAGE_NAME} +else + GLOBAL_CACHE_DIR=/vcache/${DOCKER_IMAGE_NAME} +fi + +LOCAL_CACHE_FILE=target/vcache/${DOCKER_IMAGE_NAME}/cache.tgz +CACHE_ENCODE_FILE=${DOCKER_PATH}/vcache/cache.base64 +sleep 1; sync ${CACHE_ENCODE_FILE} + +SRC_VERSION_PATH=files/build/versions + +if [[ ! -z ${SONIC_VERSION_CACHE} && -e ${CACHE_ENCODE_FILE} ]]; then + + cat ${CACHE_ENCODE_FILE} | base64 -d >${LOCAL_CACHE_FILE} + rm -f ${CACHE_ENCODE_FILE} + + #VERSION_FILES="${DOCKER_PATH}/buildinfo/versions/versions-*" + VERSION_FILES="${SRC_VERSION_PATH}/dockers/${DOCKER_IMAGE_NAME}/versions-*-${DISTRO}-${ARCH} ${SRC_VERSION_PATH}/default/versions-*" + #DEP_FILES="${DOCKER_FILE}.j2" + DEP_FILES="${DOCKER_PATH}/Dockerfile.j2" + if [[ ${DOCKER_IMAGE_NAME} =~ '-dbg' ]]; then + DEP_FILES="${DEP_FILES} build_debug_docker_j2.sh" + fi + VERSION_SHA="$( (echo -n "${!IMAGE_DBGS_NAME}"; cat ${DEP_FILES} ${VERSION_FILES}) | sha1sum | awk '{print substr($1,0,23);}')" + GLOBAL_CACHE_FILE=${GLOBAL_CACHE_DIR}/${DOCKER_IMAGE_NAME}-${VERSION_SHA}.tgz + + GIT_FILE_STATUS=$(git status -s ${DEP_FILES}) + + if [ -f ${LOCAL_CACHE_FILE} ]; then + if [[ -z ${GIT_FILE_STATUS} && ! -e ${GLOBAL_CACHE_FILE} ]]; then + mkdir -p ${GLOBAL_CACHE_DIR} + chmod -f 777 ${GLOBAL_CACHE_DIR} + FLOCK ${GLOBAL_CACHE_FILE} + cp ${LOCAL_CACHE_FILE} ${GLOBAL_CACHE_FILE} + chmod -f 777 ${LOCAL_CACHE_FILE} ${GLOBAL_CACHE_FILE} + FUNLOCK ${GLOBAL_CACHE_FILE} + fi + fi +fi + + +# Docker hub pull cache +VERSION_FILE="${BUILDINFO_PATH}/versions/versions-docker" +BUILD_DOCKER_VERSION_FILE="target/versions/default/versions-docker" +PULL_DOCKER=$(awk '/^[[:space:]]*FROM /{print $2}' ${DOCKER_FILE} ) +VERSION=$(docker inspect -f '{{index .RepoDigests 0}}' ${PULL_DOCKER} 2>/dev/null | awk -F"@" '{print $NF}' ) + +if [ ! -z ${SONIC_VERSION_CACHE} ]; then + if [ ! 
-z ${PULL_DOCKER} ];then + + GLOBAL_DOCKER_CACHE_FILE=${GLOBAL_CACHE_DIR}/${PULL_DOCKER//:/-}-${VERSION//:/-}.tgz + if [ ! -f ${GLOBAL_DOCKER_CACHE_FILE} ]; then + mkdir -p ${GLOBAL_CACHE_DIR} + chmod -f 777 ${GLOBAL_CACHE_DIR} + FLOCK ${GLOBAL_DOCKER_CACHE_FILE} + docker save ${PULL_DOCKER} | gzip -c > ${GLOBAL_DOCKER_CACHE_FILE} + chmod -f 777 ${GLOBAL_DOCKER_CACHE_FILE} + FUNLOCK ${GLOBAL_DOCKER_CACHE_FILE} + fi + + fi +fi + +# Update the docker version file +if [ ! -f ${BUILD_DOCKER_VERSION_FILE} ] ; then + mkdir -p $(dirname ${BUILD_DOCKER_VERSION_FILE}) + touch ${BUILD_DOCKER_VERSION_FILE} +fi +BUILD_VERSION=$(grep "^${PULL_DOCKER}==${VERSION}" ${BUILD_DOCKER_VERSION_FILE} | awk -F"==" '{print $NF}') +if [[ -z ${BUILD_VERSION} && ${VERSION} == sha256:* ]]; then + echo "${PULL_DOCKER}==${VERSION}" >> ${BUILD_DOCKER_VERSION_FILE} + sort ${BUILD_DOCKER_VERSION_FILE} -o ${BUILD_DOCKER_VERSION_FILE} -u &> /dev/null +fi + + docker container rm $DOCKER_CONTAINER diff --git a/scripts/collect_host_image_version_files.sh b/scripts/collect_host_image_version_files.sh index 2cabc049d9c7..209a217ebd7e 100755 --- a/scripts/collect_host_image_version_files.sh +++ b/scripts/collect_host_image_version_files.sh @@ -1,13 +1,74 @@ #!/bin/bash -TARGET=$1 -FILESYSTEM_ROOT=$2 +[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x + +ARCH=$1 +DISTRO=$2 +TARGET=$3 +FILESYSTEM_ROOT=$4 VERSIONS_PATH=$TARGET/versions/host-image +IMAGENAME="host-image" + +. 
scripts/utils.sh [ -d $VERSIONS_PATH ] && sudo rm -rf $VERSIONS_PATH mkdir -p $VERSIONS_PATH -sudo LANG=C chroot $FILESYSTEM_ROOT post_run_buildinfo +mkdir -p target/vcache/${HOST_IMAGE_NAME} +sudo LANG=C chroot $FILESYSTEM_ROOT post_run_buildinfo ${IMAGENAME} cp -r $FILESYSTEM_ROOT/usr/local/share/buildinfo/pre-versions $VERSIONS_PATH/ cp -r $FILESYSTEM_ROOT/usr/local/share/buildinfo/post-versions $VERSIONS_PATH/ + + +# Version cache +HOST_IMAGE_NAME=host-image +HOST_IMAGE_PATH=${VERSIONS_PATH} +GLOBAL_CACHE_DIR=/vcache/${HOST_IMAGE_NAME} +HOST_IMAGE_SRC_PATH=files/build/versions/${HOST_IMAGE_NAME} +LOCAL_CACHE_DIR=target/vcache/${HOST_IMAGE_NAME} +LOCAL_CACHE_FILE=${LOCAL_CACHE_DIR}/cache.tgz +mkdir -p ${LOCAL_CACHE_DIR} +sudo chmod -f 777 ${LOCAL_CACHE_DIR} + +if [ ! -z ${SONIC_VERSION_CACHE} ]; then + + #cat ${LOCAL_CACHE_DIR}/cache.base64| base64 -d >${LOCAL_CACHE_FILE} + #rm -f ${LOCAL_CACHE_DIR}/cache.base64 + + #cat ${FILESYSTEM_ROOT}/etc/apt/apt.conf.d/dpkg-cache + sudo rm -f ${FILESYSTEM_ROOT}/etc/apt/apt.conf.d/dpkg-cache + #sudo rm -rf ${FILESYSTEM_ROOT}/sonic + + + if [[ ! 
-f ${HOST_IMAGE_PATH}/post-versions/versions-web ]]; then + cp ${HOST_IMAGE_SRC_PATH}/versions-web ${HOST_IMAGE_PATH}/post-versions/versions-web + fi + + #VERSION_FILES="${HOST_IMAGE_PATH}/post-versions/versions-*-${DISTRO}-${ARCH} ${HOST_IMAGE_PATH}/post-versions/versions-web" + VERSION_FILES="${HOST_IMAGE_SRC_PATH}/versions-*-${DISTRO}-${ARCH} ${HOST_IMAGE_SRC_PATH}/versions-*" + DEP_FILES="build_debian.sh files/build_templates/sonic_debian_extension.j2" + VERSION_SHA="$( cat ${DEP_FILES} ${VERSION_FILES} | sha1sum | awk '{print substr($1,0,23);}')" + + GLOBAL_CACHE_FILE=${GLOBAL_CACHE_DIR}/${HOST_IMAGE_NAME}-${VERSION_SHA}.tgz + GIT_FILE_STATUS=$(git status -s ${DEP_FILES}) + + mkdir -p ${GLOBAL_CACHE_DIR} + sudo chmod -f 777 ${GLOBAL_CACHE_DIR} + BIN_CACHE_PATH=${FILESYSTEM_ROOT}/sonic/target/vcache/${HOST_IMAGE_NAME} + BIN_CACHE_FILE=${BIN_CACHE_PATH}/cache.tgz + cp ${BIN_CACHE_FILE} ${LOCAL_CACHE_FILE} + + # Save it into version cache + if [[ -z ${GIT_FILE_STATUS} && ! -f ${GLOBAL_CACHE_FILE} ]]; then + + FLOCK ${GLOBAL_CACHE_FILE} + cp ${LOCAL_CACHE_FILE} ${GLOBAL_CACHE_FILE} + sudo chmod -f 777 ${GLOBAL_CACHE_FILE} + FUNLOCK ${GLOBAL_CACHE_FILE} + fi + +fi + +sudo LANG=C chroot $FILESYSTEM_ROOT post_run_cleanup ${IMAGENAME} + diff --git a/scripts/generate_buildinfo_config.sh b/scripts/generate_buildinfo_config.sh index fe7657a6b6c9..b0ec54924246 100755 --- a/scripts/generate_buildinfo_config.sh +++ b/scripts/generate_buildinfo_config.sh @@ -6,5 +6,8 @@ BUILDINFO_CONFIG=$BUILDINFO_PATH/buildinfo/config/buildinfo.config mkdir -p $BUILDINFO_PATH/buildinfo/config -echo "PACKAGE_URL_PREFIX=$PACKAGE_URL_PREFIX" > $BUILDINFO_CONFIG -echo "SONIC_VERSION_CONTROL_COMPONENTS=$SONIC_VERSION_CONTROL_COMPONENTS" >> $BUILDINFO_CONFIG +echo "export PACKAGE_URL_PREFIX=$PACKAGE_URL_PREFIX" > $BUILDINFO_CONFIG +echo "export SONIC_VERSION_CONTROL_COMPONENTS=$SONIC_VERSION_CONTROL_COMPONENTS" >> $BUILDINFO_CONFIG +echo "export SONIC_VERSION_CACHE=${SONIC_VERSION_CACHE}" >> 
$BUILDINFO_CONFIG +echo "export SONIC_VERSION_CACHE_SOURCE=${SONIC_VERSION_CACHE_SOURCE}" >> $BUILDINFO_CONFIG +echo "export DISTRO=${DISTRO}" >> $BUILDINFO_CONFIG diff --git a/scripts/prepare_debian_image_buildinfo.sh b/scripts/prepare_debian_image_buildinfo.sh index 912e0de0b25f..2b32e29c0116 100755 --- a/scripts/prepare_debian_image_buildinfo.sh +++ b/scripts/prepare_debian_image_buildinfo.sh @@ -1,9 +1,14 @@ #!/bin/bash +[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x + +set -x ARCH=$1 DISTRO=$2 FILESYSTEM_ROOT=$3 +HOST_IMAGE_NAME=host-image +IMAGENAME=${HOST_IMAGE_NAME} . /usr/local/share/buildinfo/scripts/buildinfo_base.sh VERSION_DEB_PREFERENCE="01-versions-deb" @@ -26,4 +31,68 @@ if [ "$ENABLE_VERSION_CONTROL_DEB" == "y" ]; then fi sudo LANG=C chroot $FILESYSTEM_ROOT /bin/bash -c "dpkg -i /usr/local/share/buildinfo/sonic-build-hooks_1.0_all.deb" -sudo LANG=C chroot $FILESYSTEM_ROOT /bin/bash -c "pre_run_buildinfo" +#sudo LANG=C chroot $FILESYSTEM_ROOT /bin/bash -c "pre_run_buildinfo" +sudo LANG=C chroot $FILESYSTEM_ROOT /bin/bash -c "echo export DISTRO=${DISTRO} >> /usr/local/share/buildinfo/config/buildinfo.config" +sudo LANG=C chroot $FILESYSTEM_ROOT /bin/bash -c "echo export IMAGENAME=${IMAGENAME} >> /usr/local/share/buildinfo/config/buildinfo.config" + + +# Version cache +HOST_IMAGE_PATH=files/build/versions/${HOST_IMAGE_NAME} +GLOBAL_CACHE_DIR=/vcache/${HOST_IMAGE_NAME} +APT_CACHE_PATH=${FILESYSTEM_ROOT}/var/cache/apt/archives +BIN_CACHE_PATH=${FILESYSTEM_ROOT}/sonic/target/vcache/${HOST_IMAGE_NAME} +BIN_CACHE_FILE=${BIN_CACHE_PATH}/cache.tgz +mkdir -p ${BIN_CACHE_PATH}/deb ${APT_CACHE_PATH} + +if [ ! 
-z ${SONIC_VERSION_CACHE} ]; then + + # Skip the deletion of cache files + cat <<-EOF >${FILESYSTEM_ROOT}/etc/apt/apt.conf.d/dpkg-cache + DPkg::Post-Invoke { "test -f /usr/bin/rsync && rsync -avzh --ignore-errors /var/cache/apt/archives/ /sonic/target/vcache/${HOST_IMAGE_NAME}/deb/; rm -f /var/cache/apt/archives/partial/*.deb /var/cache/apt/*.bin || true"; }; + APT::Update::Post-Invoke { "test -f /usr/bin/rsync && rsync -avzh --ignore-errors /var/cache/apt/archives/ /sonic/target/vcache/${HOST_IMAGE_NAME}/deb/; rm -f /var/cache/apt/archives/partial/*.deb /var/cache/apt/*.bin || true"; }; + APT::Keep-Downloaded-Packages "true"; + EOF + + VERSION_FILES="${HOST_IMAGE_PATH}/versions-*-${DISTRO}-${ARCH} ${HOST_IMAGE_PATH}/versions-*" + DEP_FILES="build_debian.sh files/build_templates/sonic_debian_extension.j2" + VERSION_SHA="$( cat ${DEP_FILES} ${VERSION_FILES} | sha1sum | awk '{print substr($1,0,23);}')" + + GLOBAL_CACHE_FILE=${GLOBAL_CACHE_DIR}/${HOST_IMAGE_NAME}-${VERSION_SHA}.tgz + LOCAL_CACHE_DIR=target/vcache/${HOST_IMAGE_NAME} + LOCAL_CACHE_FILE=${LOCAL_CACHE_DIR}/cache.tgz + GIT_FILE_STATUS=$(git status -s ${DEP_FILES}) + + if [[ ! -f ${LOCAL_CACHE_FILE} ]]; then + mkdir -p ${LOCAL_CACHE_DIR} + chmod -f 777 ${LOCAL_CACHE_DIR} + tar -zcf ${LOCAL_CACHE_FILE} -T /dev/null + chmod -f 777 ${LOCAL_CACHE_FILE} + fi + + if [[ -e ${GLOBAL_CACHE_FILE} ]]; then + cp ${GLOBAL_CACHE_FILE} ${LOCAL_CACHE_FILE} + touch ${GLOBAL_CACHE_FILE} + else + # When file is modified, Global SHA is calculated with the local change. 
+ # Load from the previous version of build cache if exists + VERSIONS=( "HEAD" "HEAD~1" "HEAD~2" ) + for VERSION in ${VERSIONS[@]}; do + VERSION_PREV_SHA="$( git --no-pager show $(ls -f ${DEP_FILES} ${VERSION_FILES}|sed 's|.*|'${VERSION}':&|g') | sha1sum | awk '{print substr($1,0,23);}')" + GLOBAL_PREV_CACHE_FILE=${GLOBAL_CACHE_DIR}/${HOST_IMAGE_NAME}-${VERSION_PREV_SHA}.tgz + if [[ -e ${GLOBAL_PREV_CACHE_FILE} ]]; then + cp ${GLOBAL_PREV_CACHE_FILE} ${LOCAL_CACHE_FILE} + touch ${GLOBAL_PREV_CACHE_FILE} + break + fi + done + fi + cp ${LOCAL_CACHE_FILE} ${BIN_CACHE_FILE} + + # disable the validity check + # cp files/apt/apt.conf.d/no-check-valid-until /etc/apt/apt.conf.d + + +fi + +sudo LANG=C chroot $FILESYSTEM_ROOT /bin/bash -c "pre_run_buildinfo ${HOST_IMAGE_NAME}" +#sudo LANG=C chroot $FILESYSTEM_ROOT /bin/bash -c "apt-get update && apt-get -y install rsync " diff --git a/scripts/prepare_docker_buildinfo.sh b/scripts/prepare_docker_buildinfo.sh index d2b2c57185d1..c34aa14a85c3 100755 --- a/scripts/prepare_docker_buildinfo.sh +++ b/scripts/prepare_docker_buildinfo.sh @@ -1,16 +1,29 @@ #!/bin/bash +[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x + +if [[ -e /usr/local/share/buildinfo/scripts/buildinfo_base.sh ]];then + set -x +fi + IMAGENAME=$1 DOCKERFILE=$2 ARCH=$3 -DOCKERFILE_TARGE=$4 +DOCKERFILE_TARGET=$4 DISTRO=$5 +. 
scripts/utils.sh + [ -z "$BUILD_SLAVE" ] && BUILD_SLAVE=n -[ -z "$DOCKERFILE_TARGE" ] && DOCKERFILE_TARGE=$DOCKERFILE -DOCKERFILE_PATH=$(dirname "$DOCKERFILE_TARGE") +[ -z "$DOCKERFILE_TARGET" ] && DOCKERFILE_TARGET=$DOCKERFILE +DOCKERFILE_PATH=$(dirname "$DOCKERFILE_TARGET") BUILDINFO_PATH="${DOCKERFILE_PATH}/buildinfo" BUILDINFO_VERSION_PATH="${BUILDINFO_PATH}/versions" +DOCKER_PATH=$(dirname $DOCKERFILE) + +if [[ ",$SONIC_VERSION_CONTROL_COMPONENTS," == *,all,* ]] || [[ ",$SONIC_VERSION_CONTROL_COMPONENTS," == *,docker,* ]]; then + ENABLE_VERSION_CONTROL_DOCKER=y +fi [ -d $BUILDINFO_PATH ] && rm -rf $BUILDINFO_PATH mkdir -p $BUILDINFO_VERSION_PATH @@ -25,29 +38,137 @@ fi # add script for reproducible build. using sha256 instead of tag for docker base image. scripts/docker_version_control.sh $@ +if [ ! -z ${SONIC_VERSION_CACHE} ]; then + export PIP_CACHE_DIR=/sonic/target/vcache/${IMAGENAME}/pip +fi + DOCKERFILE_PRE_SCRIPT='# Auto-Generated for buildinfo +ARG SONIC_VERSION_CACHE +ARG SONIC_VERSION_CONTROL_COMPONENTS COPY ["buildinfo", "/usr/local/share/buildinfo"] +COPY vcache/ /sonic/target/vcache/'${IMAGENAME}' RUN dpkg -i /usr/local/share/buildinfo/sonic-build-hooks_1.0_all.deb -RUN pre_run_buildinfo' +ENV IMAGENAME='${IMAGENAME}' +ENV DISTRO='${DISTRO}' +ENV PIP_CACHE_DIR='${PIP_CACHE_DIR}' +RUN pre_run_buildinfo '${IMAGENAME}' +' # Add the auto-generate code if it is not added in the target Dockerfile -if [ ! -f $DOCKERFILE_TARGE ] || ! grep -q "Auto-Generated for buildinfo" $DOCKERFILE_TARGE; then +if [ ! -f $DOCKERFILE_TARGET ] || ! 
grep -q "Auto-Generated for buildinfo" $DOCKERFILE_TARGET; then # Insert the docker build script before the RUN command LINE_NUMBER=$(grep -Fn -m 1 'RUN' $DOCKERFILE | cut -d: -f1) TEMP_FILE=$(mktemp) awk -v text="${DOCKERFILE_PRE_SCRIPT}" -v linenumber=$LINE_NUMBER 'NR==linenumber{print text}1' $DOCKERFILE > $TEMP_FILE # Append the docker build script at the end of the docker file - echo -e "\nRUN post_run_buildinfo" >> $TEMP_FILE + echo -e "\nRUN post_run_buildinfo ${IMAGENAME} " >> $TEMP_FILE + echo -e "\nRUN post_run_cleanup ${IMAGENAME} " >> $TEMP_FILE - cat $TEMP_FILE > $DOCKERFILE_TARGE + cat $TEMP_FILE > $DOCKERFILE_TARGET rm -f $TEMP_FILE fi # Copy the build info config +mkdir -p ${BUILDINFO_PATH} cp -rf src/sonic-build-hooks/buildinfo/* $BUILDINFO_PATH # Generate the version lock files scripts/versions_manager.py generate -t "$BUILDINFO_VERSION_PATH" -n "$IMAGENAME" -d "$DISTRO" -a "$ARCH" touch $BUILDINFO_VERSION_PATH/versions-deb + + +# Version cache +DOCKER_IMAGE_NAME=${IMAGENAME} +IMAGE_DBGS_NAME=${DOCKER_IMAGE_NAME//-/_}_image_dbgs + +if [[ ${DOCKER_IMAGE_NAME} == sonic-slave-* ]]; then + GLOBAL_CACHE_DIR=${SONIC_VERSION_CACHE_SOURCE}/${DOCKER_IMAGE_NAME} +else + GLOBAL_CACHE_DIR=/vcache/${DOCKER_IMAGE_NAME} +fi + +LOCAL_CACHE_DIR=target/vcache/${DOCKER_IMAGE_NAME} +mkdir -p ${LOCAL_CACHE_DIR} ${DOCKER_PATH}/vcache/ +chmod -f 777 ${LOCAL_CACHE_DIR} ${DOCKER_PATH}/vcache/ + +if [[ "$SKIP_BUILD_HOOK" == y || ${ENABLE_VERSION_CONTROL_DOCKER} != y ]]; then + exit 0 +fi + +SRC_VERSION_PATH=files/build/versions +if [ ! 
-z ${SONIC_VERSION_CACHE} ]; then + + #VERSION_FILES="${DOCKER_PATH}/buildinfo/versions/versions-*" + VERSION_FILES="${SRC_VERSION_PATH}/dockers/${DOCKER_IMAGE_NAME}/versions-*-${DISTRO}-${ARCH} ${SRC_VERSION_PATH}/default/versions-*" + #DEP_FILES="${DOCKERFILE}.j2" + DEP_FILES="Dockerfile.j2" + if [[ ${DOCKER_IMAGE_NAME} =~ '-dbg' ]]; then + DEP_DBG_FILES="build_debug_docker_j2.sh" + fi + VERSION_SHA="$( (echo -n "${!IMAGE_DBGS_NAME}"; \ + (cd ${DOCKER_PATH}; cat ${DEP_FILES}); \ + cat ${DEP_DBG_FILES} ${VERSION_FILES}) \ + | sha1sum | awk '{print substr($1,0,23);}')" + + GLOBAL_CACHE_FILE=${GLOBAL_CACHE_DIR}/${DOCKER_IMAGE_NAME}-${VERSION_SHA}.tgz + LOCAL_CACHE_FILE=${LOCAL_CACHE_DIR}/cache.tgz + GIT_FILE_STATUS=$(git status -s ${DEP_FILES}) + + if [[ ! -f ${LOCAL_CACHE_FILE} ]]; then + tar -zcf ${LOCAL_CACHE_FILE} -T /dev/null + chmod -f 777 ${LOCAL_CACHE_FILE} + fi + + if [[ -e ${GLOBAL_CACHE_FILE} ]]; then + cp ${GLOBAL_CACHE_FILE} ${LOCAL_CACHE_FILE} + touch ${GLOBAL_CACHE_FILE} + else + # When file is modified, Global SHA is calculated with the local change. + # Load from the previous version of build cache if exists + VERSIONS=( "HEAD" "HEAD~1" "HEAD~2" ) + for VERSION in ${VERSIONS[@]}; do + VERSION_PREV_SHA="$( (echo -n "${!IMAGE_DBGS_NAME}"; \ + (cd ${DOCKER_PATH}; git --no-pager show $(ls -f ${DEP_FILES}|sed 's|.*|'${VERSION}':./&|g')); \ + (git --no-pager show $(ls -f ${DEP_DBG_FILES} ${VERSION_FILES}|sed 's|.*|'${VERSION}':&|g'))) \ + | sha1sum | awk '{print substr($1,0,23);}')" + GLOBAL_PREV_CACHE_FILE=${GLOBAL_CACHE_DIR}/${DOCKER_IMAGE_NAME}-${VERSION_PREV_SHA}.tgz + if [[ -e ${GLOBAL_PREV_CACHE_FILE} ]]; then + cp ${GLOBAL_PREV_CACHE_FILE} ${LOCAL_CACHE_FILE} + touch ${GLOBAL_PREV_CACHE_FILE} + break + fi + done + fi + + rm -f ${DOCKER_PATH}/vcache/cache.tgz + ln -f ${LOCAL_CACHE_FILE} ${DOCKER_PATH}/vcache/cache.tgz + + +else + # Delete the cache file if version cache is disabled. 
+ rm -f ${DOCKER_PATH}/vcache/cache.tgz +fi + +# Docker hub pull cache +VERSION_FILE="${BUILDINFO_PATH}/versions/versions-docker" +PULL_DOCKER=$(awk '/^[[:space:]]*FROM /{print $2}' ${DOCKERFILE} ) + +if [ -f ${VERSION_FILE} ]; then + + VERSION=$(grep "^${PULL_DOCKER}=" ${VERSION_FILE} | awk -F"==" '{print $NF}') + GLOBAL_DOCKER_CACHE_FILE=${GLOBAL_CACHE_DIR}/${PULL_DOCKER//:/-}-${VERSION//:/-}.tgz + if [[ ! -z ${VERSION} && ! -z ${PULL_DOCKER} && ( ${SONIC_VERSION_CONTROL_COMPONENTS} == "docker" || ${SONIC_VERSION_CONTROL_COMPONENTS} == "all" ) ]]; then + + if [ -f ${GLOBAL_DOCKER_CACHE_FILE} ]; then + docker load < ${GLOBAL_DOCKER_CACHE_FILE} + else + docker pull ${PULL_DOCKER}@${VERSION} + fi + fi + +fi + + + diff --git a/scripts/prepare_slave_container_buildinfo.sh b/scripts/prepare_slave_container_buildinfo.sh index 1fb2f006640b..ec72809b33c6 100755 --- a/scripts/prepare_slave_container_buildinfo.sh +++ b/scripts/prepare_slave_container_buildinfo.sh @@ -1,14 +1,20 @@ #!/bin/bash +[[ ! 
-z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x + SLAVE_DIR=$1 ARCH=$2 DISTRO=$3 # Install the latest debian package sonic-build-hooks in the slave container -sudo dpkg -i --force-overwrite $SLAVE_DIR/buildinfo/sonic-build-hooks_*.deb > /dev/null +sudo dpkg -i --force-overwrite $SLAVE_DIR/buildinfo/sonic-build-hooks_*.deb &> /dev/null # Enable the build hooks -symlink_build_hooks +sudo symlink_build_hooks + +# set the global permissions +sudo chmod -f 777 /usr/local/share/buildinfo/log -R +sudo chmod -f 777 /usr/local/share/buildinfo/ -R # Build the slave running config cp -rf $SLAVE_DIR/buildinfo/* /usr/local/share/buildinfo/ @@ -21,8 +27,8 @@ apt-get update > /dev/null 2>&1 # Build the slave version config [ -d /usr/local/share/buildinfo/versions ] && rm -rf /usr/local/share/buildinfo/versions scripts/versions_manager.py generate -t "/usr/local/share/buildinfo/versions" -n "build-${SLAVE_DIR}" -d "$DISTRO" -a "$ARCH" -touch ${BUILDINFO_PATH}/versions/versions-deb +touch ${BUILDINFO_PATH}/versions/versions-deb ${BUILDINFO_PATH}/versions/versions-web -rm -f /etc/apt/preferences.d/01-versions-deb -([ "$ENABLE_VERSION_CONTROL_DEB" == "y" ] && [ -f $VERSION_DEB_PREFERENCE ]) && cp -f $VERSION_DEB_PREFERENCE /etc/apt/preferences.d/ +sudo rm -f /etc/apt/preferences.d/01-versions-deb +([ "$ENABLE_VERSION_CONTROL_DEB" == "y" ] && [ -f $VERSION_DEB_PREFERENCE ]) && sudo cp -f $VERSION_DEB_PREFERENCE /etc/apt/preferences.d/ exit 0 diff --git a/scripts/utils.sh b/scripts/utils.sh new file mode 100644 index 000000000000..3d2ca7015c42 --- /dev/null +++ b/scripts/utils.sh @@ -0,0 +1,2 @@ +#!/bin/bash +. 
$(dirname $0)/../src/sonic-build-hooks/scripts/utils.sh diff --git a/scripts/versions_manager.py b/scripts/versions_manager.py index a20684e97bb4..27b757721f25 100755 --- a/scripts/versions_manager.py +++ b/scripts/versions_manager.py @@ -4,6 +4,7 @@ import glob import os import sys +import re ALL_DIST = 'all' ALL_ARCH = 'all' @@ -24,7 +25,7 @@ class Component: arch -- Architectrue, such as amd64, arm64, etc ''' - def __init__(self, versions, ctype, dist=ALL_DIST, arch=ALL_ARCH): + def __init__(self, verbose=None, versions={}, ctype="deb", dist=ALL_DIST, arch=ALL_ARCH): self.versions = versions self.ctype = ctype if not dist: @@ -33,6 +34,7 @@ def __init__(self, versions, ctype, dist=ALL_DIST, arch=ALL_ARCH): arch = ALL_ARCH self.dist = dist self.arch = arch + self.verbose = verbose @classmethod def get_versions(cls, version_file): @@ -51,7 +53,7 @@ def get_versions(cls, version_file): return result def clone(self): - return Component(self.versions.copy(), self.ctype, self.dist, self.arch) + return Component(self.verbose, self.versions.copy(), self.ctype, self.dist, self.arch) def merge(self, versions, overwritten=True): for package in versions: @@ -71,7 +73,7 @@ def dump(self, config=False, priority=999): result.append(lines) else: result.append('{0}=={1}'.format(package, self.versions[package])) - return "\n".join(result) + return "\n".join(result)+'\n' def dump_to_file(self, version_file, config=False, priority=999): if len(self.versions) <= 0: @@ -92,6 +94,35 @@ def dump_to_path(self, file_path, config=False, priority=999): file_path = os.path.join(file_path, filename) self.dump_to_file(file_path, config, priority) + def print(self, file_path): + if len(self.versions) <= 0: + return + + if self.verbose is None: + return + + filename = self.get_filename() + file_path = os.path.join(file_path, filename) + if self.verbose and re.search("cfile=", self.verbose) \ + and not re.search(self.verbose, "cfile=all".format(filename)) \ + and not re.search(self.verbose, 
"cfile={}".format(filename)): + return + print("VERSION : {}".format(file_path)) + for package in sorted(self.versions.keys(), key=str.casefold): + if self.verbose and re.search("ctype=", self.verbose) \ + and not re.search("ctype=all".format(self.ctype), self.verbose) \ + and not re.search("ctype={}".format(self.ctype), self.verbose): + continue + if self.verbose and re.search("cname=", self.verbose) \ + and not re.search(self.verbose, "cname=all".format(package)) \ + and not re.search(self.verbose, "cname={}".format(package)): + continue + if self.verbose and re.search("cver=", self.verbose) \ + and not re.search(self.verbose, "cver=all".format(self.versions[package])) \ + and not re.search(self.verbose, "cver={}".format(self.versions[package])): + continue + print('{0}=={1}'.format(package, self.versions[package])) + # Check if the self component can be overwritten by the input component def check_overwritable(self, component, for_all_dist=False, for_all_arch=False): if self.ctype != component.ctype: @@ -153,9 +184,11 @@ class VersionModule: name -- The name of the image, such as sonic-slave-buster, docker-lldp, etc ''' - def __init__(self, name=None, components=None): + def __init__(self, verbose=None, name=None, components=None): self.name = name self.components = components + self.module_path="" + self.verbose=verbose # Overwrite the docker/host image/base image versions def overwrite(self, module, for_all_dist=False, for_all_arch=False): @@ -191,6 +224,7 @@ def get_config_module(self, source_path, dist, arch): module = default_module.clone(exclude_ctypes=DEFAULT_OVERWRITE_COMPONENTS) return self._get_config_module(module, dist, arch) + #Merge the default with specific version def _get_config_module(self, default_module, dist, arch): module = default_module.clone() default_ctype_components = module._get_components_per_ctypes() @@ -205,11 +239,11 @@ def _get_config_module(self, default_module, dist, arch): continue config_component = 
self._get_config_for_ctype(components, dist, arch) config_components.append(config_component) - config_module = VersionModule(self.name, config_components) + config_module = VersionModule(self.verbose, self.name, config_components) return config_module def _get_config_for_ctype(self, components, dist, arch): - result = Component({}, components[0].ctype, dist, arch) + result = Component(self.verbose, {}, components[0].ctype, dist, arch) for component in sorted(components, key = lambda x : x.get_order_keys()): if result.check_inheritable(component): result.merge(component.versions, True) @@ -224,7 +258,7 @@ def subtract(self, default_module): components = sorted(components, key = lambda x : x.get_order_keys()) for i in range(0, len(components)): component = components[i] - base_module = VersionModule(self.name, components[0:i]) + base_module = VersionModule(self.verbose, self.name, components[0:i]) config_module = base_module._get_config_module(default_module, component.dist, component.arch) config_components = config_module._get_components_by_ctype(ctype) if len(config_components) > 0: @@ -253,7 +287,7 @@ def _adjust_components_for_ctype(self, components): result = [] for i in range(0, len(components)): component = components[i] - inheritable_component = Component({}, component.ctype) + inheritable_component = Component(self.verbose, {}, component.ctype) for j in range(0, i): base_component = components[j] if component.check_inheritable(base_component): @@ -276,6 +310,7 @@ def load(self, image_path, filter_ctype=None, filter_dist=None, filter_arch=None file_paths = glob.glob(version_file_pattern) components = [] self.name = os.path.basename(image_path) + self.module_path = image_path self.components = components for file_path in file_paths: filename = os.path.basename(file_path) @@ -296,18 +331,25 @@ def load(self, image_path, filter_ctype=None, filter_dist=None, filter_arch=None if filter_arch and arch and filter_arch != arch and arch != ALL_ARCH: continue versions 
= Component.get_versions(file_path) - component = Component(versions, ctype, dist, arch) + component = Component(self.verbose, versions, ctype, dist, arch) components.append(component) + if self.verbose and re.search("stage=load", self.verbose): + component.print(file_path) def load_from_target(self, image_path): + self.module_path=image_path post_versions = os.path.join(image_path, 'post-versions') if os.path.exists(post_versions): self.load(post_versions) self.name = os.path.basename(image_path) + if self.verbose and re.search("stage=post", self.verbose): + self.print(post_versions) pre_versions = os.path.join(image_path, 'pre-versions') if os.path.exists(pre_versions): - pre_module = VersionModule() + pre_module = VersionModule(self.verbose) pre_module.load(pre_versions) + if self.verbose and re.search("stage=pre", self.verbose): + pre_module.print(pre_versions) self.subtract(pre_module) else: self.load(image_path) @@ -319,6 +361,15 @@ def dump(self, module_path, config=False, priority=999): for component in self.components: component.dump_to_path(module_path, config, priority) + def print(self, module_path): + if self.verbose is None: + return + if re.search("cmod=", self.verbose) \ + and not re.search(self.verbose, "cmod=all".format(self.name)) \ + and not re.search(self.verbose, "cmod={}".format(self.name)): + return + for component in self.components: + component.print(module_path) def filter(self, ctypes=[]): if 'all' in ctypes: return self @@ -340,7 +391,7 @@ def clone(self, ctypes=None, exclude_ctypes=None): if ctypes and component.ctype not in ctypes: continue components.append(component.clone()) - return VersionModule(self.name, components) + return VersionModule(self.verbose, self.name, components) def is_slave_module(self): return self.name.startswith('sonic-slave-') @@ -370,14 +421,18 @@ def get_module_path_by_name(cls, source_path, module_name): return os.path.join(source_path, 'files/build/versions/build', module_name) return 
os.path.join(source_path, 'files/build/versions/dockers', module_name) + def __repr__(self): + return repr(self.name) + class VersionBuild: ''' The VersionBuild consists of multiple version modules. ''' - def __init__(self, target_path="./target", source_path='.'): + def __init__(self, verbose=None, target_path="./target", source_path='.'): self.target_path = target_path self.source_path = source_path + self.verbose = verbose self.modules = {} def load_from_target(self): @@ -394,8 +449,11 @@ def load_from_target(self): for file_path in file_paths: if not os.path.isdir(file_path): continue - module = VersionModule() + module = VersionModule(self.verbose) module.load_from_target(file_path) + if self.verbose and re.search("stage=tmodname", self.verbose): + print("Target modname={}, path={}".format(module.name, file_path)) + module.print(file_path) modules[module.name] = module self._merge_dgb_modules() @@ -411,8 +469,11 @@ def load_from_source(self): modules = {} self.modules = modules for image_path in paths: - module = VersionModule() + module = VersionModule(self.verbose) module.load(image_path) + if self.verbose and re.search("stage=smodname", self.verbose): + print("Source modname={}, path={}".format(module.name, image_path)) + module.print(image_path) modules[module.name] = module def overwrite(self, build, for_all_dist=False, for_all_arch=False): @@ -430,6 +491,13 @@ def dump(self): module_path = self.get_module_path(module) module.dump(module_path) + def print(self, message=None): + if self.verbose is None: + return + if message is not None: + print("[============={}===========]".format(message)) + for module in [ self.modules[x] for x in (sorted(self.modules, key = lambda x : x)) ]: + module.print(module.module_path) def subtract(self, default_module): none_aggregatable_module = default_module.clone(exclude_ctypes=DEFAULT_OVERWRITE_COMPONENTS) for module in self.modules.values(): @@ -455,20 +523,39 @@ def freeze(self, rebuild=False, for_all_dist=False, 
for_all_arch=False, ctypes=[ self.dump() return self.load_from_source() + if self.verbose and re.search("stage=init", self.verbose): + self.print("Initial Source") + default_module = self.modules.get(DEFAULT_MODULE, None) - target_build = VersionBuild(self.target_path, self.source_path) + if self.verbose and re.search("stage=init", self.verbose): + default_module.print("Default Module") + + target_build = VersionBuild(self.verbose, self.target_path, self.source_path) target_build.load_from_target() target_build.filter(ctypes=ctypes) + if self.verbose and re.search("stage=init", self.verbose): + target_build.print("Initial Target") + if not default_module: raise Exception("The default versions does not exist") - for module in target_build.modules.values(): + for module in [ target_build.modules[x] for x in (sorted(target_build.modules, key = lambda x : x)) ] : if module.is_individule_version(): continue tmp_module = module.clone(exclude_ctypes=DEFAULT_OVERWRITE_COMPONENTS) default_module.overwrite(tmp_module, for_all_dist=True, for_all_arch=True) + if self.verbose and re.search("stage=tmp", self.verbose): + default_module.print("TMP DEFAULT MODULE") + target_build.subtract(default_module) + if self.verbose and re.search("stage=tmp", self.verbose): + target_build.print("After Subtract Target") + self.print("After Subtract Source") self.overwrite(target_build, for_all_dist=for_all_dist, for_all_arch=for_all_arch) - self.dump() + + if self.verbose and re.search("stage=add", self.verbose): + self.print("After Merge") + if not self.verbose or not re.search("dryrun", self.verbose): + self.dump() def filter(self, ctypes=[]): for module in self.modules.values(): @@ -485,14 +572,14 @@ def get_default_module(self): for dist in dists: versions = self._get_versions(ctype, dist) common_versions = self._get_common_versions(versions) - component = Component(common_versions, ctype, dist) + component = Component(self.verbose, common_versions, ctype, dist) 
components.append(component) else: versions = self._get_versions(ctype) common_versions = self._get_common_versions(versions) - component = Component(common_versions, ctype) + component = Component(self.verbose, common_versions, ctype) components.append(component) - return VersionModule(DEFAULT_MODULE, components) + return VersionModule(self.verbose, DEFAULT_MODULE, components) def get_aggregatable_modules(self): modules = {} @@ -619,11 +706,13 @@ def freeze(self): parser.add_argument('-d', '--for_all_dist', action='store_true', help='apply the versions for all distributions') parser.add_argument('-a', '--for_all_arch', action='store_true', help='apply the versions for all architectures') parser.add_argument('-c', '--ctypes', default='all', help='component types to freeze') + parser.add_argument('-v', '--verbose', default=None, help="verbose mode") args = parser.parse_args(sys.argv[2:]) ctypes = args.ctypes.split(',') if len(ctypes) == 0: ctypes = ['all'] - build = VersionBuild(target_path=args.target_path, source_path=args.source_path) + + build = VersionBuild(verbose=args.verbose, target_path=args.target_path, source_path=args.source_path) build.freeze(rebuild=args.rebuild, for_all_dist=args.for_all_dist, for_all_arch=args.for_all_arch, ctypes=ctypes) def merge(self): @@ -632,6 +721,8 @@ def merge(self): parser.add_argument('-m', '--module_path', default=None, help='merge path, use the target path if not specified') parser.add_argument('-b', '--base_path', required=True, help='base path, merge to the module path') parser.add_argument('-e', '--exclude_module_path', default=None, help='exclude module path') + parser.add_argument('-i', '--include_module_path', default=None, help='include module path') + parser.add_argument('-v', '--verbose', default=None, help="verbose mode") args = parser.parse_args(sys.argv[2:]) module_path = args.module_path if not module_path: @@ -640,15 +731,22 @@ def merge(self): print('The module path {0} does not exist'.format(module_path)) 
if not os.path.exists(args.target_path): os.makedirs(args.target_path) - module = VersionModule() + module = VersionModule(args.verbose) module.load(module_path) - base_module = VersionModule() + base_module = VersionModule(args.verbose) base_module.load(args.base_path) module.overwrite(base_module) if args.exclude_module_path: - exclude_module = VersionModule() + exclude_module = VersionModule(args.verbose) exclude_module.load(args.exclude_module_path) module.subtract(exclude_module) + if args.include_module_path: + include_module = VersionModule(args.verbose) + include_module.load(args.include_module_path) + if args.verbose: + include_module.print(args.include_module_path) + include_module.overwrite(module) + module.overwrite(include_module) module.dump(args.target_path) def generate(self): @@ -661,6 +759,7 @@ def generate(self): parser.add_argument('-d', '--distribution', required=True, help="distribution") parser.add_argument('-a', '--architecture', required=True, help="architecture") parser.add_argument('-p', '--priority', default=999, help="priority of the debian apt preference") + parser.add_argument('-v', '--verbose', default=None, help="verbose mode") args = parser.parse_args(sys.argv[2:]) module_path = args.module_path @@ -668,11 +767,20 @@ def generate(self): module_path = VersionModule.get_module_path_by_name(args.source_path, args.module_name) if not os.path.exists(args.target_path): os.makedirs(args.target_path) - module = VersionModule() + module = VersionModule(args.verbose) module.load(module_path, filter_dist=args.distribution, filter_arch=args.architecture) config = module.get_config_module(args.source_path, args.distribution, args.architecture) + if args.verbose: + config.print(args.source_path) config.clean_info(force=True) config.dump(args.target_path, config=True, priority=args.priority) if __name__ == "__main__": VersionManagerCommands() + + +""" +Dry run examples: + scripts/versions_manager.py freeze -v 
'dryrun|cmod=docker-config-engine-stretch|cfile=versions-py2|cname=all|stage=sub|stage=add|stage=init|stage=tmodname|stage=tmp' + scripts/versions_manager.py freeze -v 'dryrun|cmod=default|cfile=versions-docker|cname=all|stage=sub|stage=add|stage=init|stage=tmodname|stage=tmp' +""" diff --git a/slave.mk b/slave.mk index 2e648b37f4c8..e8947746af9b 100644 --- a/slave.mk +++ b/slave.mk @@ -63,6 +63,7 @@ endif IMAGE_DISTRO := bullseye IMAGE_DISTRO_DEBS_PATH = $(TARGET_PATH)/debs/$(IMAGE_DISTRO) IMAGE_DISTRO_FILES_PATH = $(TARGET_PATH)/files/$(IMAGE_DISTRO) +export GO=/usr/local/go/bin/go # Python 2 packages will not be available in Bullseye ifeq ($(BLDENV),bullseye) @@ -90,6 +91,10 @@ export BLDENV ## Define configuration, help etc. ############################################################################### +# Install the updated build hooks if INSHOOKS flag is set +export INSHOOKS=y +$(if $(INSHOOKS),$(shell sudo dpkg -i /usr/local/share/buildinfo/sonic-build-hooks_1.0_all.deb &>/dev/null)) + .platform : ifneq ($(CONFIGURED_PLATFORM),generic) @echo Build system is not configured, please run make configure @@ -109,6 +114,7 @@ configure : @mkdir -p $(PYTHON_DEBS_PATH) @mkdir -p $(PYTHON_WHEELS_PATH) @mkdir -p $(DPKG_ADMINDIR_PATH) + @mkdir -p $(TARGET_PATH)/vcache @echo $(PLATFORM) > .platform @echo $(PLATFORM_ARCH) > .arch @@ -369,6 +375,10 @@ $(info "INCLUDE_PDE" : "$(INCLUDE_PDE)") $(info "SONIC_DEBUGGING_ON" : "$(SONIC_DEBUGGING_ON)") $(info "SONIC_PROFILING_ON" : "$(SONIC_PROFILING_ON)") $(info "KERNEL_PROCURE_METHOD" : "$(KERNEL_PROCURE_METHOD)") +$(info "SONIC_VERSION_CACHE_METHOD" : "$(SONIC_VERSION_CACHE)") +ifneq ($(SONIC_VERSION_CACHE),) +$(info "SONIC_VERSION_CACHE_SOURCE" : "$(SONIC_VERSION_CACHE_SOURCE)") +endif $(info "BUILD_TIMESTAMP" : "$(BUILD_TIMESTAMP)") $(info "BUILD_LOG_TIMESTAMP" : "$(BUILD_LOG_TIMESTAMP)") $(info "SONIC_IMAGE_VERSION" : "$(SONIC_IMAGE_VERSION)") @@ -407,8 +417,9 @@ $(info SONiC Build System for 
$(CONFIGURED_PLATFORM):$(CONFIGURED_ARCH)) endif # Overwrite the buildinfo in slave container -$(shell sudo scripts/prepare_slave_container_buildinfo.sh $(SLAVE_DIR) $(CONFIGURED_ARCH) $(BLDENV)) - +ifeq ($(filter clean,$(MAKECMDGOALS)),) +$(shell DBGOPT='$(DBGOPT)' scripts/prepare_slave_container_buildinfo.sh $(SLAVE_DIR) $(CONFIGURED_ARCH) $(BLDENV)) +endif include Makefile.cache ifeq ($(SONIC_USE_DOCKER_BUILDKIT),y) @@ -529,7 +540,7 @@ $(addprefix $(DEBS_PATH)/, $(SONIC_ONLINE_DEBS)) : $(DEBS_PATH)/% : .platform \ if [ -z '$($*_CACHE_LOADED)' ] ; then $(foreach deb,$* $($*_DERIVED_DEBS), \ - { curl -L -f -o $(DEBS_PATH)/$(deb) $($(deb)_CURL_OPTIONS) $($(deb)_URL) $(LOG) || { exit 1 ; } } ; ) + { SKIP_BUILD_HOOK=$($*_SKIP_VERSION) curl -L -f -o $(DEBS_PATH)/$(deb) $($(deb)_CURL_OPTIONS) $($(deb)_URL) $(LOG) || { exit 1 ; } } ; ) # Save the target deb into DPKG cache $(call SAVE_CACHE,$*,$@) @@ -546,7 +557,7 @@ SONIC_TARGET_LIST += $(addprefix $(DEBS_PATH)/, $(SONIC_ONLINE_DEBS)) # SONIC_ONLINE_FILES += $(SOME_NEW_FILE) $(addprefix $(FILES_PATH)/, $(SONIC_ONLINE_FILES)) : $(FILES_PATH)/% : .platform $(HEADER) - curl -L -f -o $@ $($*_CURL_OPTIONS) $($*_URL) $(LOG) + SKIP_BUILD_HOOK=$($*_SKIP_VERSION) curl -L -f -o $@ $($*_CURL_OPTIONS) $($*_URL) $(LOG) $(FOOTER) SONIC_TARGET_LIST += $(addprefix $(FILES_PATH)/, $(SONIC_ONLINE_FILES)) @@ -854,7 +865,7 @@ $(SONIC_INSTALL_WHEELS) : $(PYTHON_WHEELS_PATH)/%-install : .platform $$(addsuff while true; do if mkdir $(PYTHON_WHEELS_PATH)/pip_lock &> /dev/null; then ifneq ($(CROSS_BUILD_ENVIRON),y) - { sudo -E pip$($*_PYTHON_VERSION) install $(PYTHON_WHEELS_PATH)/$* $(LOG) && rm -d $(PYTHON_WHEELS_PATH)/pip_lock && break; } || { rm -d $(PYTHON_WHEELS_PATH)/pip_lock && exit 1 ; } + { sudo -E SKIP_BUILD_HOOK=Y pip$($*_PYTHON_VERSION) install $(PYTHON_WHEELS_PATH)/$* $(LOG) && rm -d $(PYTHON_WHEELS_PATH)/pip_lock && break; } || { rm -d $(PYTHON_WHEELS_PATH)/pip_lock && exit 1 ; } else # Link python script and data expected 
location to the cross python virtual env istallation locations { PATH=$(VIRTENV_BIN_CROSS_PYTHON$($*_PYTHON_VERSION)):${PATH} sudo -E $(VIRTENV_BIN_CROSS_PYTHON$($*_PYTHON_VERSION))/pip$($*_PYTHON_VERSION) install $(PYTHON_WHEELS_PATH)/$* $(LOG) && $(if $(findstring $(SONIC_CONFIG_ENGINE_PY3),$*),(sudo ln -s $(VIRTENV_BIN_CROSS_PYTHON$($*_PYTHON_VERSION))/sonic-cfggen /usr/local/bin/sonic-cfggen 2>/dev/null || true), true ) && $(if $(findstring $(SONIC_YANG_MODELS_PY3),$*),(sudo ln -s $(VIRTENV_BASE_CROSS_PYTHON3)/yang-models /usr/local/yang-models 2>/dev/null || true), true ) && rm -d $(PYTHON_WHEELS_PATH)/pip_lock && break; } || { rm -d $(PYTHON_WHEELS_PATH)/pip_lock && exit 1 ; } @@ -882,7 +893,11 @@ $(addprefix $(TARGET_PATH)/, $(SONIC_SIMPLE_DOCKER_IMAGES)) : $(TARGET_PATH)/%.g # Apply series of patches if exist if [ -f $($*.gz_PATH).patch/series ]; then pushd $($*.gz_PATH) && QUILT_PATCHES=../$(notdir $($*.gz_PATH)).patch quilt push -a; popd; fi # Prepare docker build info - scripts/prepare_docker_buildinfo.sh $* $($*.gz_PATH)/Dockerfile $(CONFIGURED_ARCH) $(TARGET_DOCKERFILE)/Dockerfile.buildinfo + PACKAGE_URL_PREFIX=$(PACKAGE_URL_PREFIX) \ + SONIC_ENFORCE_VERSIONS=$(SONIC_ENFORCE_VERSIONS) \ + TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) \ + SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) \ + DBGOPT='$(DBGOPT)' scripts/prepare_docker_buildinfo.sh $* $($*.gz_PATH)/Dockerfile $(CONFIGURED_ARCH) $(TARGET_DOCKERFILE)/Dockerfile.buildinfo $(LOG) docker info $(LOG) docker build --squash --no-cache \ --build-arg http_proxy=$(HTTP_PROXY) \ @@ -894,9 +909,12 @@ $(addprefix $(TARGET_PATH)/, $(SONIC_SIMPLE_DOCKER_IMAGES)) : $(TARGET_PATH)/%.g --build-arg docker_container_name=$($*.gz_CONTAINER_NAME) \ --label Tag=$(SONIC_IMAGE_VERSION) \ -f $(TARGET_DOCKERFILE)/Dockerfile.buildinfo \ - -t $(DOCKER_IMAGE_REF) $($*.gz_PATH) $(LOG) - scripts/collect_docker_version_files.sh $(DOCKER_IMAGE_REF) $(TARGET_PATH) + -t $(DOCKER_IMAGE_REF) $($*.gz_PATH) \ + | awk 
'/^_VCSTART_$$/,/^_VCEND_$$/{if($$0 !~ /_VCSTART_|_VCEND_/)print >"$($*.gz_PATH)/vcache/cache.base64";next}{print }' $(LOG) + DBGOPT='$(DBGOPT)' SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) ARCH=${CONFIGURED_ARCH} \ + scripts/collect_docker_version_files.sh $* $(TARGET_PATH) $(DOCKER_IMAGE_REF) $($*.gz_PATH) $(LOG) $(call docker-image-save,$*,$@) + # Clean up if [ -f $($*.gz_PATH).patch/series ]; then pushd $($*.gz_PATH) && quilt pop -a -f; [ -d .pc ] && rm -rf .pc; popd; fi $(FOOTER) @@ -973,6 +991,7 @@ $(addprefix $(TARGET_PATH)/, $(DOCKER_IMAGES)) : $(TARGET_PATH)/%.gz : .platform mkdir -p $($*.gz_PATH)/files $(LOG) mkdir -p $($*.gz_PATH)/python-debs $(LOG) mkdir -p $($*.gz_PATH)/python-wheels $(LOG) + mkdir -p $(TARGET_PATH)/vcache/$* $($*.gz_PATH)/vcache $(LOG) sudo mount --bind $($*.gz_DEBS_PATH) $($*.gz_PATH)/debs $(LOG) sudo mount --bind $($*.gz_FILES_PATH) $($*.gz_PATH)/files $(LOG) sudo mount --bind $(PYTHON_DEBS_PATH) $($*.gz_PATH)/python-debs $(LOG) @@ -997,7 +1016,8 @@ $(addprefix $(TARGET_PATH)/, $(DOCKER_IMAGES)) : $(TARGET_PATH)/%.gz : .platform PACKAGE_URL_PREFIX=$(PACKAGE_URL_PREFIX) \ SONIC_ENFORCE_VERSIONS=$(SONIC_ENFORCE_VERSIONS) \ TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) \ - scripts/prepare_docker_buildinfo.sh $* $($*.gz_PATH)/Dockerfile $(CONFIGURED_ARCH) + SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) \ + DBGOPT='$(DBGOPT)' scripts/prepare_docker_buildinfo.sh $* $($*.gz_PATH)/Dockerfile $(CONFIGURED_ARCH) $(LOG) docker info $(LOG) docker build --squash --no-cache \ --build-arg http_proxy=$(HTTP_PROXY) \ @@ -1009,13 +1029,19 @@ $(addprefix $(TARGET_PATH)/, $(DOCKER_IMAGES)) : $(TARGET_PATH)/%.gz : .platform --build-arg docker_container_name=$($*.gz_CONTAINER_NAME) \ --build-arg frr_user_uid=$(FRR_USER_UID) \ --build-arg frr_user_gid=$(FRR_USER_GID) \ + --build-arg SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) \ + --build-arg SONIC_VERSION_CACHE_SOURCE=$(SONIC_VERSION_CACHE_SOURCE) \ --build-arg image_version=$(SONIC_IMAGE_VERSION) \ --label 
com.azure.sonic.manifest="$$(cat $($*.gz_PATH)/manifest.json)" \ --label Tag=$(SONIC_IMAGE_VERSION) \ $($(subst -,_,$(notdir $($*.gz_PATH)))_labels) \ - -t $(DOCKER_IMAGE_REF) $($*.gz_PATH) $(LOG) - scripts/collect_docker_version_files.sh $(DOCKER_IMAGE_REF) $(TARGET_PATH) + -t $(DOCKER_IMAGE_REF) $($*.gz_PATH) \ + | awk '/^_VCSTART_$$/,/^_VCEND_$$/{if($$0 !~ /_VCSTART_|_VCEND_/)print >"$($*.gz_PATH)/vcache/cache.base64";next}{print }' $(LOG_SAVE) + + DBGOPT='$(DBGOPT)' SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) ARCH=${CONFIGURED_ARCH}\ + scripts/collect_docker_version_files.sh $* $(TARGET_PATH) $(DOCKER_IMAGE_REF) $($*.gz_PATH) $($*.gz_PATH)/Dockerfile $(LOG) $(call docker-image-save,$*,$@) + # Clean up if [ -f $($*.gz_PATH).patch/series ]; then pushd $($*.gz_PATH) && quilt pop -a -f; [ -d .pc ] && rm -rf .pc; popd; fi @@ -1043,6 +1069,7 @@ $(addprefix $(TARGET_PATH)/, $(DOCKER_DBG_IMAGES)) : $(TARGET_PATH)/%-$(DBG_IMAG mkdir -p $($*.gz_PATH)/debs $(LOG) sudo mount --bind $($*.gz_DEBS_PATH) $($*.gz_PATH)/debs $(LOG) + mkdir -p $(TARGET_PATH)/vcache/$*-dbg $($*.gz_PATH)/vcache $(LOG) # Export variables for j2. Use path for unique variable names, e.g. 
docker_orchagent_debs $(eval export $(subst -,_,$(notdir $($*.gz_PATH)))_dbg_debs=$(shell printf "$(subst $(SPACE),\n,$(call expand,$($*.gz_DBG_DEPENDS),RDEPENDS))\n" | awk '!a[$$0]++')) $(eval export $(subst -,_,$(notdir $($*.gz_PATH)))_image_dbgs=$(shell printf "$(subst $(SPACE),\n,$(call expand,$($*.gz_DBG_IMAGE_PACKAGES)))\n" | awk '!a[$$0]++')) @@ -1054,7 +1081,8 @@ $(addprefix $(TARGET_PATH)/, $(DOCKER_DBG_IMAGES)) : $(TARGET_PATH)/%-$(DBG_IMAG PACKAGE_URL_PREFIX=$(PACKAGE_URL_PREFIX) \ SONIC_ENFORCE_VERSIONS=$(SONIC_ENFORCE_VERSIONS) \ TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) \ - scripts/prepare_docker_buildinfo.sh $* $($*.gz_PATH)/Dockerfile-dbg $(CONFIGURED_ARCH) + SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) \ + DBGOPT='$(DBGOPT)' scripts/prepare_docker_buildinfo.sh $*-dbg $($*.gz_PATH)/Dockerfile-dbg $(CONFIGURED_ARCH) $(LOG) docker info $(LOG) docker build \ $(if $($*.gz_DBG_DEPENDS), --squash --no-cache, --no-cache) \ @@ -1062,12 +1090,18 @@ $(addprefix $(TARGET_PATH)/, $(DOCKER_DBG_IMAGES)) : $(TARGET_PATH)/%-$(DBG_IMAG --build-arg https_proxy=$(HTTPS_PROXY) \ --build-arg no_proxy=$(NO_PROXY) \ --build-arg docker_container_name=$($*.gz_CONTAINER_NAME) \ + --build-arg SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) \ + --build-arg SONIC_VERSION_CACHE_SOURCE=$(SONIC_VERSION_CACHE_SOURCE) \ --label com.azure.sonic.manifest="$$(cat $($*.gz_PATH)/manifest.json)" \ --label Tag=$(SONIC_IMAGE_VERSION) \ --file $($*.gz_PATH)/Dockerfile-dbg \ - -t $(DOCKER_DBG_IMAGE_REF) $($*.gz_PATH) $(LOG) - scripts/collect_docker_version_files.sh $(DOCKER_DBG_IMAGE_REF) $(TARGET_PATH) + -t $(DOCKER_DBG_IMAGE_REF) $($*.gz_PATH) \ + | awk '/^_VCSTART_$$/,/^_VCEND_$$/{if($$0 !~ /_VCSTART_|_VCEND_/)print >"$($*.gz_PATH)/vcache.base64";next}{print }' $(LOG_SAVE) + + DBGOPT='$(DBGOPT)' SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) ARCH=${CONFIGURED_ARCH}\ + scripts/collect_docker_version_files.sh $*-dbg $(TARGET_PATH) $(DOCKER_DBG_IMAGE_REF) $($*.gz_PATH) $($*.gz_PATH)/Dockerfile-dbg $(LOG) 
$(call docker-image-save,$*-$(DBG_IMAGE_MARK),$@) + # Clean up docker rmi -f $(DOCKER_IMAGE_REF) &> /dev/null || true if [ -f $($*.gz_PATH).patch/series ]; then pushd $($*.gz_PATH) && quilt pop -a -f; [ -d .pc ] && rm -rf .pc; popd; fi @@ -1356,6 +1390,8 @@ $(addprefix $(TARGET_PATH)/, $(SONIC_INSTALLERS)) : $(TARGET_PATH)/% : \ SIGNING_KEY="$(SIGNING_KEY)" \ SIGNING_CERT="$(SIGNING_CERT)" \ PACKAGE_URL_PREFIX=$(PACKAGE_URL_PREFIX) \ + DBGOPT='$(DBGOPT)' \ + SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) \ MULTIARCH_QEMU_ENVIRON=$(MULTIARCH_QEMU_ENVIRON) \ CROSS_BUILD_ENVIRON=$(CROSS_BUILD_ENVIRON) \ ./build_debian.sh $(LOG) @@ -1418,7 +1454,7 @@ SONIC_CLEAN_TARGETS += $(addsuffix -clean,$(addprefix $(TARGET_PATH)/, \ $(SONIC_SIMPLE_DOCKER_IMAGES) \ $(SONIC_INSTALLERS))) $(SONIC_CLEAN_TARGETS) :: $(TARGET_PATH)/%-clean : .platform - @rm -f $(TARGET_PATH)/$* + @rm -f $(TARGET_PATH)/$* target/versions/dockers/$(subst .gz,,$*) SONIC_CLEAN_STDEB_DEBS = $(addsuffix -clean,$(addprefix $(PYTHON_DEBS_PATH)/, \ $(SONIC_PYTHON_STDEB_DEBS))) @@ -1433,7 +1469,13 @@ $(SONIC_CLEAN_WHEELS) :: $(PYTHON_WHEELS_PATH)/%-clean : .platform clean-logs :: .platform @rm -f $(TARGET_PATH)/*.log $(DEBS_PATH)/*.log $(FILES_PATH)/*.log $(PYTHON_DEBS_PATH)/*.log $(PYTHON_WHEELS_PATH)/*.log -clean :: .platform clean-logs $$(SONIC_CLEAN_DEBS) $$(SONIC_CLEAN_FILES) $$(SONIC_CLEAN_TARGETS) $$(SONIC_CLEAN_STDEB_DEBS) $$(SONIC_CLEAN_WHEELS) +clean-versions :: .platform + @rm -rf target/versions/* + +vclean:: .platform + @sudo rm -rf target/vcache/* target/baseimage* + +clean :: .platform clean-logs clean-versions $$(SONIC_CLEAN_DEBS) $$(SONIC_CLEAN_FILES) $$(SONIC_CLEAN_TARGETS) $$(SONIC_CLEAN_STDEB_DEBS) $$(SONIC_CLEAN_WHEELS) ############################################################################### ## all diff --git a/sonic-slave-bullseye/Dockerfile.j2 b/sonic-slave-bullseye/Dockerfile.j2 index 2258946c781d..7aa29141f20c 100644 --- a/sonic-slave-bullseye/Dockerfile.j2 +++ 
b/sonic-slave-bullseye/Dockerfile.j2 @@ -605,7 +605,7 @@ RUN update-alternatives --set iptables /usr/sbin/iptables-legacy RUN pip3 install m2crypto==0.36.0 # Install swi tools -RUN pip3 install git+https://github.com/aristanetworks/swi-tools.git@bead66bf261770237f7dd21ace3774ba04a017e9 +RUN SKIP_BUILD_HOOK=y pip3 install git+https://github.com/aristanetworks/swi-tools.git@bead66bf261770237f7dd21ace3774ba04a017e9 {% if CONFIGURED_ARCH != "amd64" -%} # Install node.js for azure pipeline diff --git a/sonic-slave-buster/Dockerfile.j2 b/sonic-slave-buster/Dockerfile.j2 index b89d44656d3a..6e394953d233 100644 --- a/sonic-slave-buster/Dockerfile.j2 +++ b/sonic-slave-buster/Dockerfile.j2 @@ -504,7 +504,7 @@ RUN export VERSION=1.14.2 \ RUN pip3 install --upgrade pip RUN pip2 install --upgrade 'pip<21' -RUN apt-get purge -y python-pip python3-pip python3-yaml +RUN apt-get purge -y python3-yaml # For building Python packages RUN pip2 install setuptools==40.8.0 @@ -633,7 +633,7 @@ RUN update-alternatives --set iptables /usr/sbin/iptables-legacy RUN pip2 install m2crypto==0.36.0 # Install swi tools -RUN pip3 install git+https://github.com/aristanetworks/swi-tools.git@bead66bf261770237f7dd21ace3774ba04a017e9 +RUN SKIP_BUILD_HOOK=y pip3 install git+https://github.com/aristanetworks/swi-tools.git@bead66bf261770237f7dd21ace3774ba04a017e9 {% if CONFIGURED_ARCH != "amd64" -%} # Install node.js for azure pipeline diff --git a/src/redis-dump-load b/src/redis-dump-load index 758549795174..d5affcb9c140 160000 --- a/src/redis-dump-load +++ b/src/redis-dump-load @@ -1 +1 @@ -Subproject commit 758549795174dc6b3be70810e0e4d6308f80f1a3 +Subproject commit d5affcb9c140e55da738de4f18b360282fb0f9e0 diff --git a/src/sonic-build-hooks/Makefile b/src/sonic-build-hooks/Makefile index 786af9056332..05c35e352681 100644 --- a/src/sonic-build-hooks/Makefile +++ b/src/sonic-build-hooks/Makefile @@ -5,7 +5,7 @@ SONIC_BUILD_HOOKS_PACKAGE = $(SONIC_BUILD_HOOKS)_$(SONIC_BUILD_HOOKS_VERSION)_al 
BUILDINFO_DIR = buildinfo TMP_DIR = tmp SYMBOL_LINKS_SRC_DIR = ../../usr/local/share/buildinfo/scripts -SYMBOL_LINKS = symlink_build_hooks post_run_buildinfo pre_run_buildinfo collect_version_files +SYMBOL_LINKS = symlink_build_hooks post_run_buildinfo pre_run_buildinfo collect_version_files post_run_cleanup SONIC_BUILD_HOOKS_TARGET = $(BUILDINFO_DIR)/$(SONIC_BUILD_HOOKS_PACKAGE) BUILD_ROOT_DIR = $(TMP_DIR)/$(SONIC_BUILD_HOOKS) DEBIAN_DIR = $(BUILD_ROOT_DIR)/DEBIAN diff --git a/src/sonic-build-hooks/hooks/apt-get b/src/sonic-build-hooks/hooks/apt-get index 068293a3e352..b217a811775e 100755 --- a/src/sonic-build-hooks/hooks/apt-get +++ b/src/sonic-build-hooks/hooks/apt-get @@ -10,6 +10,12 @@ if [ -z "$REAL_COMMAND" ]; then exit 1 fi +if [[ "$SKIP_BUILD_HOOK" == y || ${ENABLE_VERSION_CONTROL_DEB} != y ]]; then + $REAL_COMMAND "$@" + exit $? +fi + + INSTALL=$(check_apt_install "$@") COMMAND_INFO="Locked by command: $REAL_COMMAND $@" if [ "$INSTALL" == y ]; then @@ -20,11 +26,29 @@ if [ "$INSTALL" == y ]; then [ "$lock_result" == y ] && release_apt_installation_lock exit $command_result else - if [[ "$1" == "purge" || "$@" == *" purge "* || "$@" == *" remove "* ]]; then + if [[ "$1" == "update" || "$@" == *" update "* ]]; then + set -x + if [[ "${FORCE_UPDATE}" == Y ]]; then + UPDATE=y + elif $(set -- ${PKG_CACHE_PATH}/apt/*_debian_dists_${DISTRO}_InRelease; test -e "$1"); then + exit 0 + else + UPDATE=y + fi + elif [[ "$1" == "purge" || "$@" == *" purge "* ]]; then # When running the purge command, collect the debian versions dpkg-query -W -f '${Package}==${Version}\n' >> $POST_VERSION_PATH/purge-versions-deb chmod a+wr $POST_VERSION_PATH/purge-versions-deb fi + $REAL_COMMAND "$@" + result=$? 
+ #if [[ ${UPDATE} == y ]]; then + # cp /var/lib/apt/lists/* ${PKG_CACHE_PATH}/apt/ + # set +x + #fi + + exit ${result} + fi diff --git a/src/sonic-build-hooks/hooks/curl b/src/sonic-build-hooks/hooks/curl index 58bfe0395e55..9cd1254de436 100755 --- a/src/sonic-build-hooks/hooks/curl +++ b/src/sonic-build-hooks/hooks/curl @@ -3,4 +3,9 @@ . /usr/local/share/buildinfo/scripts/buildinfo_base.sh [ -z $REAL_COMMAND ] && REAL_COMMAND=/usr/bin/curl +if [[ "$SKIP_BUILD_HOOK" == y || ${ENABLE_VERSION_CONTROL_WEB} != y ]]; then + $REAL_COMMAND "$@" + exit $? +fi + REAL_COMMAND=$REAL_COMMAND download_packages "$@" diff --git a/src/sonic-build-hooks/hooks/easy_install b/src/sonic-build-hooks/hooks/easy_install new file mode 100644 index 000000000000..b6261e3e0357 --- /dev/null +++ b/src/sonic-build-hooks/hooks/easy_install @@ -0,0 +1,63 @@ +#!/bin/bash +set -x + +. /usr/local/share/buildinfo/scripts/buildinfo_base.sh +[ -z $REAL_COMMAND ] && REAL_COMMAND=$(get_command easy_install) +if [ -z "$REAL_COMMAND" ]; then + echo "The command easy_install does not exist." 1>&2 + exit 1 +fi + +if [ ! -x "$REAL_COMMAND" ]; then + return 1 +fi + +if [[ "$SKIP_BUILD_HOOK" == y || ${ENABLE_VERSION_CONTROL_EASY_INSTALL} != y ]]; then + $REAL_COMMAND "$@" + exit $? +fi + + +parameters=("$@") +for para in "${parameters[@]}" +do + if [[ "$para" == -* ]]; then + continue + elif [[ "$para" == pip* ]]; then + package=${para} + fi +done + +if [[ -f ${PIP_VERSION_FILE} ]]; then + package_name=$(V awk -F"==" '/'${para}'/{print $1 }' ${PIP_VERSION_FILE} ) + package_version=$( awk -F"==" '/'${para}'/{print $2 }' ${PIP_VERSION_FILE} ) + package="${package_name}==${package_version}" +fi + + +SHA_PIP=$(echo ${package} | sha1sum | awk '{print substr($1,0,23);}') +PIP_CACHE_DIR=${PKG_CACHE_PATH}/pip/${package//==/-} + +if [ "$ENABLE_VERSION_CONTROL_PY" != "y" ]; then + if [[ ! 
-z "$(get_version_cache_option)" && -e ${PIP_CACHE_DIR} ]]; then + FLOCK ${PIP_CACHE_DIR} + PIP_FILE=$(ls ${PIP_CACHE_DIR}/pip*) + sudo python ${PIP_FILE}/pip install --no-index ${PIP_FILE} + result=$? + chmod -f -R 777 ${PIP_CACHE_DIR} + touch ${PIP_CACHE_DIR} + FUNLOCK ${PIP_CACHE_DIR} + exit ${result} + fi +fi + +$REAL_COMMAND "$@" +result=$? + +if [[ ! -z "$(get_version_cache_option)" ]]; then + FLOCK ${PIP_CACHE_DIR} + sudo /usr/local/bin/pip download -d ${PIP_CACHE_DIR} ${package} + chmod -f -R 777 ${PIP_CACHE_DIR} + FUNLOCK ${PIP_CACHE_DIR} +fi +exit ${result} diff --git a/src/sonic-build-hooks/hooks/git b/src/sonic-build-hooks/hooks/git index 8c3e39f67a7d..06121bba0a7d 100755 --- a/src/sonic-build-hooks/hooks/git +++ b/src/sonic-build-hooks/hooks/git @@ -1,19 +1,29 @@ #!/bin/bash +. /usr/local/share/buildinfo/scripts/buildinfo_base.sh + +[ -z $REAL_COMMAND ] && REAL_COMMAND=$(get_command git) +if [[ "$SKIP_BUILD_HOOK" == y || ${ENABLE_VERSION_CONTROL_GIT} != y ]]; then + $REAL_COMMAND "$@" + exit $? +fi + parse_config(){ - . /usr/local/share/buildinfo/scripts/buildinfo_base.sh - REAL_COMMAND=$(get_command git) - version_file=$VERSION_PATH/versions-git - new_version_file=$BUILD_VERSION_PATH/versions-git + version_file=${GIT_VERSION_FILE} + new_version_file=${BUILD_GIT_VERSION_FILE} MODE_CLONE=0 + MODE_SUBMOD=0 # parse input parameters for i in "$@" do if [[ $i == "clone" ]];then MODE_CLONE=1 fi + if [[ $i == "--recurse-submodules" ]];then + MODE_SUBMOD=1 + fi done } @@ -43,33 +53,129 @@ get_clone_path(){ [ -z $clone_PATH ] && clone_PATH=`echo $URL | sed 's/\/$//' | awk -F/ '{print$NF}' | awk -F. 
'{print$1}'` } +bfind() +{ + queue="$1" + shift + + while [ -n "$queue" ] + do + echo "$queue" | xargs -I'{}' find {} -mindepth 1 -maxdepth 1 $* + queue=`echo "$queue" | xargs -I'{}' find {} -mindepth 1 -maxdepth 1 -type d` + done + +} + +git_submod_bundle(){ + git_filename=$1 + $REAL_COMMAND submodule foreach --recursive $REAL_COMMAND bundle create bundle.git --all + $REAL_COMMAND bundle create bundle.git --all + find . -name "bundle.git" -exec tar -uvf ${git_filename} {} \; +} + +git_submod_unbundle(){ + git_filename=$1 + clone_path=$2 + tar -xvf ${git_filename} + BUNDLE_LIST=$(bfind ./ -name "bundle.git" ) + echo ${BUNDLE_LIST} + for bundle in ${BUNDLE_LIST} + do + DIRPATH=${clone_path}/${bundle/#.\//} + $REAL_COMMAND clone ${bundle} $(dirname ${DIRPATH}) + done +} + main(){ parse_config "$@" + get_clone_path "$@" - # execute git. - $REAL_COMMAND "$@" - result=$? + if [[ $MODE_CLONE == 1 && $ENABLE_VERSION_CONTROL_GIT == "y" && ! -z "$(get_version_cache_option)" ]];then + [ -f $version_file ] && commit=`grep $URL $version_file | awk -F"==" '{print $NF}'` + FILE_NAME=$(echo $URL |sed 's|[:/]\+|-|g') + if [[ $MODE_SUBMOD == 1 ]]; then + GIT_FILENAME=${PKG_CACHE_PATH}/git/${FILE_NAME}-subundle-${commit}.tgz + else + GIT_FILENAME=${PKG_CACHE_PATH}/git/${FILE_NAME}-${commit}.tgz + fi + if [ ! -z ${commit} ] && [ -f ${GIT_FILENAME} ]; then + log_info "Loading from git cache URL:${URL}, SRC:${GIT_FILENAME}, DST:${clone_PATH}" + if [[ $MODE_SUBMOD == 1 ]]; then + git_submod_unbundle ${GIT_FILENAME} ${clone_PATH} + else + $REAL_COMMAND clone ${GIT_FILENAME} ${clone_PATH} + fi + result=$? + touch ${GIT_FILENAME} + exit $result + else + $REAL_COMMAND "$@" + result=$? + fi + else + # execute git. + $REAL_COMMAND "$@" + result=$? 
+ fi + + #Return if there is any error + if [ ${result} -ne 0 ]; then + exit $result + fi # if sub command is not "clone", exit if [[ $MODE_CLONE != 1 ]];then exit $result fi - get_clone_path "$@" + pushd $clone_PATH &> /dev/null - commit_latest=`$REAL_COMMAND log -n 1 | head -n 1| awk '{print$2}'` - [ -f $version_file ] && commit=`grep $URL $version_file | awk -F== '{print$2}'` + commit_latest=`$REAL_COMMAND rev-parse HEAD` + [ -f $version_file ] && commit=`grep $URL $version_file | awk -F"==" '{print$2}'` + [ -z ${commit} ] && commit=${commit_latest} + + if [[ ! -z "$(get_version_cache_option)" ]]; then + FILE_NAME=$(echo $URL |sed 's|[:/]\+|-|g') + #Append subundle suffix + if [[ $MODE_SUBMOD == 1 ]]; then + GIT_FILENAME=${PKG_CACHE_PATH}/git/${FILE_NAME}-subundle-${commit}.tgz + else + GIT_FILENAME=${PKG_CACHE_PATH}/git/${FILE_NAME}-${commit}.tgz + fi + mkdir -p ${PKG_CACHE_PATH}/git + sudo chmod 777 ${PKG_CACHE_PATH}/git + if [[ ! -e ${GIT_FILENAME} ]]; then + FLOCK ${GIT_FILENAME} + if [[ $MODE_SUBMOD == 1 ]]; then + git_submod_bundle ${GIT_FILENAME} + else + $REAL_COMMAND bundle create ${GIT_FILENAME} --all + fi + chmod -f 777 ${GIT_FILENAME} + FUNLOCK ${GIT_FILENAME} + log_info "Saved into git cache URL:${URL}, DST:${GIT_FILENAME}, SRC:${clone_PATH}" + file ${GIT_FILENAME} + else + log_info "Already exists git cache URL:${URL}, DST:${GIT_FILENAME}, SRC:${clone_PATH}" + fi + fi + # control version or record version file if [[ $ENABLE_VERSION_CONTROL_GIT == "y" ]];then # control version - [ -n $commit ] && echo "git reset --hard $commit" >> ${new_version_file}.log - [ -n $commit ] && $REAL_COMMAND reset --hard $commit &> ${new_version_file}.log - else - # record version file - echo "$URL==$commit_latest" >> $new_version_file - sort $new_version_file -o $new_version_file -u &> /dev/null - fi + if [ ! 
-z ${commit} ] ; then + $REAL_COMMAND reset --hard ${commit} &> /dev/null + fi + fi + + [ -f $new_version_file ] && new_commit=`grep "$URL==${commit}" $new_version_file | awk -F"==" '{print $NF}'` + if [ -z ${new_commit} ]; then + # record version file + echo "$URL==${commit}" >> $new_version_file + sort $new_version_file -o $new_version_file -u &> /dev/null + fi + popd &> /dev/null exit $result diff --git a/src/sonic-build-hooks/hooks/go b/src/sonic-build-hooks/hooks/go new file mode 100644 index 000000000000..82cef679bf3e --- /dev/null +++ b/src/sonic-build-hooks/hooks/go @@ -0,0 +1,51 @@ +#!/bin/bash + +. /usr/local/share/buildinfo/scripts/buildinfo_base.sh +#[ -z $REAL_COMMAND ] && REAL_COMMAND=$(get_command go) +[ -z $REAL_COMMAND ] && REAL_COMMAND=/usr/local/go/bin/go +if [ -z "$REAL_COMMAND" ]; then + echo "The command go does not exist." 1>&2 + exit 1 +fi + +if [[ "$SKIP_BUILD_HOOK" == y || ${ENABLE_VERSION_CONTROL_GO} != y ]]; then + $REAL_COMMAND "$@" + exit $? +fi + +if [ ! -z "$(get_version_cache_option)" ]; then + GOFILE=${PWD}/go.mod + if [ -f ${GOFILE} ]; then + + SHAVAL=$(cat ${GOFILE} | sha1sum | awk '{print substr($1,0,11);}' ) + GO_CACHE_PATH=${PKG_CACHE_PATH}/go/${SHAVAL} + export GO=${REAL_COMMAND} + + EXT_PATH=${GOPATH##*/} + if [[ "${SHAVAL}" != "${EXT_PATH}" ]]; then + if [[ -e ${GO_CACHE_PATH} && ! -z ${GOPATH} && ! -e ${GOPATH}/${SHAVAL} ]]; then + FLOCK ${GO_CACHE_PATH} + rsync -avzh --ignore-errors ${GO_CACHE_PATH} ${GOPATH}/ &>/dev/null + chmod -f -R 777 ${GOPATH}/${SHAVAL} + touch ${GOPATH}/${SHAVAL} + FUNLOCK ${GO_CACHE_PATH} + fi + export GOPATH=${GOPATH}/${SHAVAL} + fi + GOPATH=${GOPATH} GO=${REAL_COMMAND} ${REAL_COMMAND} "$@" + + if [[ $2 == "vendor" && ! -e ${GO_CACHE_PATH} && -e ${GOPATH}/../${SHAVAL} ]]; then + mkdir -p ${GO_CACHE_PATH} + chmod -f 777 ${PKG_CACHE_PATH}/go + FLOCK ${GO_CACHE_PATH} + rsync -avzh --ignore-errors ${GOPATH}/ ${GO_CACHE_PATH} &>/dev/null + [ ! 
-z ${GO_CACHE_PATH} ] && chmod -f -R 777 ${GO_CACHE_PATH} + FUNLOCK ${GO_CACHE_PATH} + fi + + else + GOPATH=${GOPATH} GO=${REAL_COMMAND} ${REAL_COMMAND} "$@" + fi +else + GOPATH=${GOPATH} ${REAL_COMMAND} "$@" +fi diff --git a/src/sonic-build-hooks/hooks/pip2 b/src/sonic-build-hooks/hooks/pip2 index e2ccfff2aac0..66a8a9da6d79 100755 --- a/src/sonic-build-hooks/hooks/pip2 +++ b/src/sonic-build-hooks/hooks/pip2 @@ -10,4 +10,9 @@ if [ ! -x "$REAL_COMMAND" ]; then exit 1 fi +if [[ "$SKIP_BUILD_HOOK" == y || ${ENABLE_VERSION_CONTROL_PIP} != y ]]; then + $REAL_COMMAND "$@" + exit $? +fi + PIP_VERSION_FILE=$VERSION_FILE ENABLE_VERSION_CONTROL_PY=$ENABLE_VERSION_CONTROL_PY2 REAL_COMMAND=$REAL_COMMAND run_pip_command "$@" diff --git a/src/sonic-build-hooks/hooks/pip3 b/src/sonic-build-hooks/hooks/pip3 index 728e642eacc0..e0d5e794e5f9 100755 --- a/src/sonic-build-hooks/hooks/pip3 +++ b/src/sonic-build-hooks/hooks/pip3 @@ -8,5 +8,9 @@ if [ ! -x "$REAL_COMMAND" ]; then echo "The command pip3 not found" 1>&2 exit 1 fi +if [[ "$SKIP_BUILD_HOOK" == y || ${ENABLE_VERSION_CONTROL_PIP} != y ]]; then + $REAL_COMMAND "$@" + exit $? +fi PIP_VERSION_FILE=$VERSION_FILE ENABLE_VERSION_CONTROL_PY=$ENABLE_VERSION_CONTROL_PY3 REAL_COMMAND=$REAL_COMMAND run_pip_command "$@" diff --git a/src/sonic-build-hooks/hooks/python b/src/sonic-build-hooks/hooks/python new file mode 100644 index 000000000000..da0f2cbcf13c --- /dev/null +++ b/src/sonic-build-hooks/hooks/python @@ -0,0 +1,18 @@ +#!/bin/bash + +. /usr/local/share/buildinfo/scripts/buildinfo_base.sh + +VERSION_FILE="$BUILDINFO_PATH/versions/versions-python2" +REAL_COMMAND=$(get_command python) + +if [ ! -x "$REAL_COMMAND" ]; then + echo "The command python2 not found" 1>&2 + exit 1 +fi + +if [[ "$SKIP_BUILD_HOOK" == y || ${ENABLE_VERSION_CONTROL_PYTHON} != y ]]; then + $REAL_COMMAND "$@" + exit $? 
+fi + +PIP_VERSION_FILE=$VERSION_FILE ENABLE_VERSION_CONTROL_PY=$ENABLE_VERSION_CONTROL_PY2 REAL_COMMAND=$REAL_COMMAND run_python_command "$@" diff --git a/src/sonic-build-hooks/hooks/python2 b/src/sonic-build-hooks/hooks/python2 new file mode 100644 index 000000000000..6fc780761640 --- /dev/null +++ b/src/sonic-build-hooks/hooks/python2 @@ -0,0 +1,18 @@ +#!/bin/bash + +. /usr/local/share/buildinfo/scripts/buildinfo_base.sh + +VERSION_FILE="$BUILDINFO_PATH/versions/versions-python2" +REAL_COMMAND=$(get_command python2) + +if [ ! -x "$REAL_COMMAND" ]; then + echo "The command python2 not found" 1>&2 + exit 1 +fi + +if [[ "$SKIP_BUILD_HOOK" == y || ${ENABLE_VERSION_CONTROL_PYTHON} != y ]]; then + $REAL_COMMAND "$@" + exit $? +fi + +PIP_VERSION_FILE=$VERSION_FILE ENABLE_VERSION_CONTROL_PY=$ENABLE_VERSION_CONTROL_PY2 REAL_COMMAND=$REAL_COMMAND run_python_command "$@" diff --git a/src/sonic-build-hooks/hooks/python3 b/src/sonic-build-hooks/hooks/python3 new file mode 100644 index 000000000000..995c2daa62be --- /dev/null +++ b/src/sonic-build-hooks/hooks/python3 @@ -0,0 +1,17 @@ +#!/bin/bash + +. /usr/local/share/buildinfo/scripts/buildinfo_base.sh + +VERSION_FILE="$BUILDINFO_PATH/versions/versions-py3" +REAL_COMMAND=$(get_command python3) +if [ ! -x "$REAL_COMMAND" ]; then + echo "The command python3 not found" 1>&2 + exit 1 +fi + +if [[ "$SKIP_BUILD_HOOK" == y || ${ENABLE_VERSION_CONTROL_PYTHON} != y ]]; then + $REAL_COMMAND "$@" + exit $? 
+fi + +PIP_VERSION_FILE=$VERSION_FILE ENABLE_VERSION_CONTROL_PY=$ENABLE_VERSION_CONTROL_PY3 REAL_COMMAND=$REAL_COMMAND run_python_command "$@" diff --git a/src/sonic-build-hooks/hooks/wget b/src/sonic-build-hooks/hooks/wget index c4cb1a3d1be9..f36958b9f906 100755 --- a/src/sonic-build-hooks/hooks/wget +++ b/src/sonic-build-hooks/hooks/wget @@ -7,7 +7,7 @@ if [ -z "$REAL_COMMAND" ]; then exit 1 fi -if [ "$SKIP_BUILD_HOOK" == y ]; then +if [[ "$SKIP_BUILD_HOOK" == y || ${ENABLE_VERSION_CONTROL_WEB} != y ]]; then $REAL_COMMAND "$@" exit $? fi diff --git a/src/sonic-build-hooks/scripts/buildinfo_base.sh b/src/sonic-build-hooks/scripts/buildinfo_base.sh index ff249dfdfb91..1fb6b3292171 100755 --- a/src/sonic-build-hooks/scripts/buildinfo_base.sh +++ b/src/sonic-build-hooks/scripts/buildinfo_base.sh @@ -13,8 +13,20 @@ WEB_VERSION_FILE=$VERSION_PATH/versions-web BUILD_WEB_VERSION_FILE=$BUILD_VERSION_PATH/versions-web REPR_MIRROR_URL_PATTERN='http:\/\/packages.trafficmanager.net\/debian' DPKG_INSTALLTION_LOCK_FILE=/tmp/.dpkg_installation.lock +GIT_VERSION_FILE=$VERSION_PATH/versions-git +BUILD_GIT_VERSION_FILE=$BUILD_VERSION_PATH/versions-git . $BUILDINFO_PATH/config/buildinfo.config +if [ -e /vcache ]; then + PKG_CACHE_PATH=/vcache/${IMAGENAME} +else + PKG_CACHE_PATH=/sonic/target/vcache/${IMAGENAME} +fi +PKG_CACHE_FILE_NAME=${PKG_CACHE_PATH}/cache.tgz +mkdir -p ${PKG_CACHE_PATH} + +. $BUILDINFO_PATH/scripts/utils.sh + URL_PREFIX=$(echo "${PACKAGE_URL_PREFIX}" | sed -E "s#(//[^/]*/).*#\1#") @@ -26,10 +38,16 @@ fi log_err() { - echo "$1" >> $LOG_PATH/error.log + echo "$(date "+%F-%H-%M-%S") ERR $1" >> $LOG_PATH/error.log + echo "$1" 1>&2 +} +log_info() +{ + echo "$(date "+%F-%H-%M-%S") INFO $1" >> $LOG_PATH/info.log echo "$1" 1>&2 } + # Get the real command not hooked by sonic-build-hook package get_command() { @@ -67,6 +85,28 @@ check_if_url_exist() fi } +get_version_cache_option() +{ + #SONIC_VERSION_CACHE="cache" + if [ ! 
-z ${SONIC_VERSION_CACHE} ]; then + if [ ${SONIC_VERSION_CACHE} == "rcache" ]; then + echo -n "rcache" + elif [ ${SONIC_VERSION_CACHE} == "wcache" ]; then + echo -n "wcache" + elif [ ${SONIC_VERSION_CACHE} == "cache" ]; then + echo -n "wcache" + else + echo -n "" + return 1 + fi + echo -n "" + return 0 + fi + echo -n "" + return 1 +} + + # Enable or disable the reproducible mirrors set_reproducible_mirrors() { @@ -88,11 +128,19 @@ download_packages() local parameters=("$@") local filenames= declare -A filenames + declare -A SRC_FILENAMES + local url= + local real_version= + local SRC_FILENAME= + local DST_FILENAME= + for (( i=0; i<${#parameters[@]}; i++ )) do local para=${parameters[$i]} local nexti=$((i+1)) - if [[ "$para" == *://* ]]; then + if [[ "$para" == -o || "$para" == -O ]]; then + DST_FILENAME=${parameters[$nexti]} + elif [[ "$para" == *://* ]]; then local url=$para local real_version= @@ -100,13 +148,35 @@ download_packages() if [[ $url == ${URL_PREFIX}* ]]; then continue fi + local result=0 + WEB_CACHE_PATH=${PKG_CACHE_PATH}/web + mkdir -p ${WEB_CACHE_PATH} + local WEB_FILENAME=$(echo $url | awk -F"/" '{print $NF}' | cut -d? -f1 | cut -d# -f1) + if [ -z "${DST_FILENAME}" ];then + DST_FILENAME=${WEB_FILENAME} + fi + local VERSION=$(grep "^${url}=" $WEB_VERSION_FILE | awk -F"==" '{print $NF}') + if [ ! -z "${VERSION}" ]; then + + if [ "$ENABLE_VERSION_CONTROL_WEB" == y ]; then + if [ ! -z "$(get_version_cache_option)" ]; then + SRC_FILENAME=${WEB_CACHE_PATH}/${WEB_FILENAME}-${VERSION}.tgz + if [ -f ${SRC_FILENAME} ]; then + log_info "Loading from web cache URL:${url}, SRC:${SRC_FILENAME}, DST:${DST_FILENAME}" + cp ${SRC_FILENAME} ${DST_FILENAME} + touch ${SRC_FILENAME} + continue + fi + fi + fi + fi if [ "$ENABLE_VERSION_CONTROL_WEB" == y ]; then local version= local filename=$(echo $url | awk -F"/" '{print $NF}' | cut -d? 
-f1 | cut -d# -f1) [ -f $WEB_VERSION_FILE ] && version=$(grep "^${url}=" $WEB_VERSION_FILE | awk -F"==" '{print $NF}') if [ -z "$version" ]; then - echo "Warning: Failed to verify the package: $url, the version is not specified" 1>&2 + log_err "Warning: Failed to verify the package: $url, the version is not specified" 1>&2 continue fi @@ -120,7 +190,7 @@ download_packages() else real_version=$(get_url_version $url) if [ "$real_version" != "$version" ]; then - echo "Failed to verify url: $url, real hash value: $real_version, expected value: $version_filename" 1>&2 + log_err "Failed to verify url: $url, real hash value: $real_version, expected value: $version_filename" 1>&2 exit 1 fi fi @@ -128,34 +198,86 @@ download_packages() real_version=$(get_url_version $url) fi - echo "$url==$real_version" >> ${BUILD_WEB_VERSION_FILE} - fi - done + VERSION=${real_version} + local SRC_FILENAME=${WEB_CACHE_PATH}/${WEB_FILENAME}-${VERSION}.tgz + SRC_FILENAMES[${DST_FILENAME}]=${SRC_FILENAME} + + [ -f ${BUILD_WEB_VERSION_FILE} ] && build_version=$(grep "^${url}==${real_version}" ${BUILD_WEB_VERSION_FILE} | awk -F"==" '{print $NF}') + if [ -z ${build_version} ]; then + echo "$url==$real_version" >> ${BUILD_WEB_VERSION_FILE} + sort ${BUILD_WEB_VERSION_FILE} -o ${BUILD_WEB_VERSION_FILE} -u &> /dev/null + fi + fi + done $REAL_COMMAND "${parameters[@]}" local result=$? + #Return if there is any error + if [ ${result} -ne 0 ]; then + exit ${result} + fi + for filename in "${!filenames[@]}" do - [ -f "$filename" ] && mv "$filename" "${filenames[$filename]}" + if [ -f "$filename" ] ; then + mv "$filename" "${filenames[$filename]}" + fi done + if [[ -z "$(get_version_cache_option)" ]]; then + return $result + fi + + + #Save them into cache + for DST_FILENAME in "${!SRC_FILENAMES[@]}" + do + SRC_FILENAME=${SRC_FILENAMES[${DST_FILENAME}]} + if [[ ! 
-e "${DST_FILENAME}" || -e ${SRC_FILENAME} ]] ; then + continue + fi + FLOCK ${SRC_FILENAME} + cp ${DST_FILENAME} ${SRC_FILENAME} + chmod -f 777 ${SRC_FILENAME} + FUNLOCK ${SRC_FILENAME} + log_info "Saving into web cache URL:${url}, DST:${SRC_FILENAME}, SRC:${DST_FILENAME}" + done + return $result } run_pip_command() { parameters=("$@") + PIP_CACHE_PATH=${PKG_CACHE_PATH}/pip + PKG_CACHE_OPTION="--cache-dir=${PIP_CACHE_PATH}" + + if [[ ! -e ${PIP_CACHE_PATH} ]]; then + ${SUDO} mkdir -p ${PIP_CACHE_PATH} + chmod 777 ${PIP_CACHE_PATH} + fi if [ ! -x "$REAL_COMMAND" ] && [ " $1" == "freeze" ]; then return 1 fi - if [ "$ENABLE_VERSION_CONTROL_PY" != "y" ]; then - $REAL_COMMAND "$@" - return $? + if [[ "$SKIP_BUILD_HOOK" == Y || "$ENABLE_VERSION_CONTROL_PY" != "y" ]]; then + if [ ! -z "$(get_version_cache_option)" ]; then + mkdir -p ${PIP_CACHE_PATH} + FLOCK ${PIP_CACHE_PATH} + $REAL_COMMAND ${PKG_CACHE_OPTION} "$@" + local result=$? + chmod -f -R 777 ${PIP_CACHE_PATH} + touch ${PIP_CACHE_PATH} + FUNLOCK ${PIP_CACHE_PATH} + return ${result} + fi + $REAL_COMMAND "$@" + return $? fi + local found=n local install=n local pip_version_file=$PIP_VERSION_FILE @@ -181,12 +303,77 @@ run_pip_command() parameters+=("${tmp_version_file}") fi - $REAL_COMMAND "${parameters[@]}" - local result=$? + if [ ! -z "$(get_version_cache_option)" ]; then + FLOCK ${PIP_CACHE_PATH} + $REAL_COMMAND ${PKG_CACHE_OPTION} "${parameters[@]}" + local result=$? + chmod -f -R 777 ${PIP_CACHE_PATH} + touch ${PIP_CACHE_PATH} + FUNLOCK ${PIP_CACHE_PATH} + else + $REAL_COMMAND "${parameters[@]}" + local result=$? + fi + rm $tmp_version_file return $result } +# Note: set -x yields trace output that causes the module test failures. + +run_python_command() +{ + parameters=("$@") + + if [ ! 
-x "$REAL_COMMAND" ] && [ " $1" == "freeze" ]; then + return 1 + fi + + if [[ -e .eggs || $1 != "setup.py" || ( $2 != "bdist_wheel" && $2 != "test" ) \ + || "${SKIP_BUILD_HOOK}" == Y || "${ENABLE_VERSION_CONTROL_PY}" != "y" ]]; then + $REAL_COMMAND "$@" + local result=$? + return ${result} + fi + + PYTHON_CACHE_PATH=${PKG_CACHE_PATH}/python/ + PYTHON_FILE=${PWD}/setup.py + + if [[ ! -e ${PYTHON_CACHE_PATH} ]]; then + ${SUDO} mkdir -p ${PYTHON_CACHE_PATH} + chmod 777 ${PYTHON_CACHE_PATH} + fi + + if [ -e ${PYTHON_FILE} ]; then + SHAVAL=$(cat ${PYTHON_FILE} | sha1sum | awk '{print substr($1,0,11);}' ) + fi + PYTHON_CACHE_FILE=${PYTHON_CACHE_PATH}/$(basename ${PWD})-${SHAVAL}.tgz + + + # Load the .eggs from version cache if exists already + if [[ -e ${PYTHON_FILE} && ! -z "$(get_version_cache_option)" && -e ${PYTHON_CACHE_FILE} ]]; then + FLOCK ${PYTHON_CACHE_FILE} + tar -C ${PWD} -zxvf ${PYTHON_CACHE_FILE} + FUNLOCK ${PYTHON_CACHE_FILE} + fi + + # Run the real python command + $REAL_COMMAND "$@" + local result=$? + + # Save the .eggs into version cache + if [[ -e ${PYTHON_FILE} && ! -z "$(get_version_cache_option)" && ! -e ${PYTHON_CACHE_FILE} ]]; then + mkdir -p ${PYTHON_CACHE_PATH} + chmod -f 777 ${PYTHON_CACHE_PATH} + FLOCK ${PYTHON_CACHE_FILE} + tar -C ${PWD} -zcvf ${PYTHON_CACHE_FILE} .eggs + chmod -f 777 ${PYTHON_CACHE_FILE} + FUNLOCK ${PYTHON_CACHE_FILE} + fi + + return ${result} +} + # Check if the command is to install the debian packages # The apt/apt-get command format: apt/apt-get [options] {update|install} check_apt_install() @@ -208,7 +395,7 @@ check_apt_install() # Print warning message if a debian package version not specified when debian version control enabled. 
check_apt_version() { - VERSION_FILE="/usr/local/share/buildinfo/versions/versions-deb" + VERSION_FILE="${VERSION_PATH}/versions-deb" local install=$(check_apt_install "$@") if [ "$ENABLE_VERSION_CONTROL_DEB" == "y" ] && [ "$install" == "y" ]; then for para in "$@" @@ -285,10 +472,10 @@ update_version_file() if [ ! -f "$pre_version_file" ]; then return 0 fi - local pacakge_versions="$(cat $pre_version_file)" - [ -f "$version_file" ] && pacakge_versions="$pacakge_versions $(cat $version_file)" + local package_versions="$(cat $pre_version_file)" + [ -f "$version_file" ] && package_versions="$package_versions $(cat $version_file)" declare -A versions - for pacakge_version in $pacakge_versions; do + for pacakge_version in $package_versions; do package=$(echo $pacakge_version | awk -F"==" '{print $1}') version=$(echo $pacakge_version | awk -F"==" '{print $2}') if [ -z "$package" ] || [ -z "$version" ]; then @@ -322,4 +509,8 @@ ENABLE_VERSION_CONTROL_PY2=$(check_version_control "py2") ENABLE_VERSION_CONTROL_PY3=$(check_version_control "py3") ENABLE_VERSION_CONTROL_WEB=$(check_version_control "web") ENABLE_VERSION_CONTROL_GIT=$(check_version_control "git") +ENABLE_VERSION_CONTROL_PIP=$(check_version_control "pip") +ENABLE_VERSION_CONTROL_PYTHON=$(check_version_control "python") +ENABLE_VERSION_CONTROL_EASY_INSTALL=$(check_version_control "easy_install") +ENABLE_VERSION_CONTROL_GO=$(check_version_control "go") ENABLE_VERSION_CONTROL_DOCKER=$(check_version_control "docker") diff --git a/src/sonic-build-hooks/scripts/collect_version_files b/src/sonic-build-hooks/scripts/collect_version_files index a4b33eeaa897..d998020de733 100755 --- a/src/sonic-build-hooks/scripts/collect_version_files +++ b/src/sonic-build-hooks/scripts/collect_version_files @@ -1,19 +1,23 @@ #!/bin/bash +TARGET_PATH=$1 . 
/usr/local/share/buildinfo/scripts/buildinfo_base.sh -TARGET_PATH=$1 -[ -z "$TARGET_PATH" ] && TARGET_PATH=$POST_VERSION_PATH +[ -d ${TARGET_PATH} ] && rm -rf ${TARGET_PATH} + ARCH=$(dpkg --print-architecture) DIST=$(grep VERSION_CODENAME /etc/os-release | cut -d= -f2) ([ -z "$DIST" ] && grep -q jessie /etc/os-release) && DIST=jessie mkdir -p $TARGET_PATH chmod a+rw $TARGET_PATH +SKIP_VERSION_PACKAGE="libsaibcm|libpaibcm|linuxptp|@ file://" +dpkg-query -W -f '${Package}==${Version}\n' | grep -Ev "${SKIP_VERSION_PACKAGE}" > "${TARGET_PATH}/versions-deb-${DIST}-${ARCH}" +([ -x "/usr/local/bin/pip2" ] || [ -x "/usr/bin/pip2" ]) && pip2 freeze --all| grep -Ev "${SKIP_VERSION_PACKAGE}" > "${TARGET_PATH}/versions-py2-${DIST}-${ARCH}" +([ -x "/usr/local/bin/pip3" ] || [ -x "/usr/bin/pip3" ]) && pip3 freeze --all| grep -Ev "${SKIP_VERSION_PACKAGE}" > "${TARGET_PATH}/versions-py3-${DIST}-${ARCH}" -dpkg-query -W -f '${Package}==${Version}\n' >> "${TARGET_PATH}/versions-deb-${DIST}-${ARCH}" -([ -x "/usr/local/bin/pip2" ] || [ -x "/usr/bin/pip2" ]) && pip2 freeze >> "${TARGET_PATH}/versions-py2-${DIST}-${ARCH}" -([ -x "/usr/local/bin/pip3" ] || [ -x "/usr/bin/pip3" ]) && pip3 freeze >> "${TARGET_PATH}/versions-py3-${DIST}-${ARCH}" +[ -f ${BUILD_WEB_VERSION_FILE} ] && cp ${BUILD_WEB_VERSION_FILE} ${TARGET_PATH} +[ -f ${BUILD_GIT_VERSION_FILE} ] && cp ${BUILD_GIT_VERSION_FILE} ${TARGET_PATH} ## Add the the packages purged [ -f $POST_VERSION_PATH/purge-versions-deb ] && cat $POST_VERSION_PATH/purge-versions-deb >> "${TARGET_PATH}/versions-deb-${DIST}-${ARCH}" diff --git a/src/sonic-build-hooks/scripts/post_run_buildinfo b/src/sonic-build-hooks/scripts/post_run_buildinfo index 97f47f7efcf1..055da6de9209 100755 --- a/src/sonic-build-hooks/scripts/post_run_buildinfo +++ b/src/sonic-build-hooks/scripts/post_run_buildinfo @@ -1,16 +1,32 @@ #!/bin/bash +IMAGENAME=$1 + . 
/usr/local/share/buildinfo/scripts/buildinfo_base.sh +set -x + +[ -d $POST_VERSION_PATH ] && rm -rf $POST_VERSION_PATH + # Collect the version files collect_version_files $POST_VERSION_PATH -[ -d $BUILD_VERSION_PATH ] && [ ! -z "$(ls -A $BUILD_VERSION_PATH)" ] && cp -rf $BUILD_VERSION_PATH/* $POST_VERSION_PATH -rm -rf $BUILD_VERSION_PATH/* +if [ ! -z "$(get_version_cache_option)" ]; then + # Skip the deletion of cache files + cat <<-EOF >/etc/apt/apt.conf.d/docker-clean + DPkg::Post-Invoke { "rm -f /var/cache/apt/archives/*.deb /var/cache/apt/archives/partial/*.deb /var/cache/apt/*.bin || true"; }; + APT::Update::Post-Invoke { "rm -f /var/cache/apt/archives/*.deb /var/cache/apt/archives/partial/*.deb /var/cache/apt/*.bin || true"; }; + EOF + + cp -r ~/.cache ${PKG_CACHE_PATH}/. + cp /var/lib/apt/lists/* ${PKG_CACHE_PATH}/apt/ + tar -C ${PKG_CACHE_PATH} --exclude=cache.tgz -zcvf ${PKG_CACHE_FILE_NAME} . + #scp -o StrictHostKeyChecking=no -i /ssh/id_rsa -P ${CPORT} ${PKG_CACHE_FILE_NAME} ${CUSER}@172.17.0.1:/${PKG_CACHE_PATH}/ + set +x + if [[ ! ${IMAGENAME} =~ host-image ]]; then + sleep 1;echo -e "\n_VCSTART_"; (cat ${PKG_CACHE_FILE_NAME} | base64); echo -e "_VCEND_\n";sleep 1 + fi + set -x +fi -# Disable the build hooks -symlink_build_hooks -d -set_reproducible_mirrors -d -# Remove the version deb preference -rm -f $VERSION_DEB_PREFERENCE diff --git a/src/sonic-build-hooks/scripts/post_run_cleanup b/src/sonic-build-hooks/scripts/post_run_cleanup new file mode 100755 index 000000000000..9c3d2ea8a48a --- /dev/null +++ b/src/sonic-build-hooks/scripts/post_run_cleanup @@ -0,0 +1,40 @@ +#!/bin/bash + +IMAGENAME=$1 + +. /usr/local/share/buildinfo/scripts/buildinfo_base.sh + +set -x + + +if [ ! -z "$(get_version_cache_option)" ]; then + + #Delete the rsync package files + if [[ ! 
${IMAGENAME} =~ -slave- ]]; then + /usr/bin/apt-get purge -y --auto-remove rsync + fi +fi + +apt-get -s clean -y +apt-get -s autoclean -y +apt-get -s autoremove -y +#apt-get -s autoremove -y --purge +rm -f /var/cache/apt/archives/*.deb /var/cache/apt/*.bin + +if [[ ! ${IMAGENAME} =~ -slave- ]]; then + rm -f /var/lib/apt/lists/* +fi + +rm -rf /sonic/target /ssh +rm -f /tmp/* +rm -rf /debs /python-wheels ~/.cache +find / | grep -E "__pycache__" | xargs rm -rf + +rm -rf $BUILD_VERSION_PATH/* + +# Disable the build hooks +symlink_build_hooks -d +#set_reproducible_mirrors -d + +# Remove the version deb preference +rm -f $VERSION_DEB_PREFERENCE diff --git a/src/sonic-build-hooks/scripts/pre_run_buildinfo b/src/sonic-build-hooks/scripts/pre_run_buildinfo index 5a8f00b55ecb..78e9fbc61690 100755 --- a/src/sonic-build-hooks/scripts/pre_run_buildinfo +++ b/src/sonic-build-hooks/scripts/pre_run_buildinfo @@ -1,19 +1,48 @@ #!/bin/bash +IMAGENAME=$1 + . /usr/local/share/buildinfo/scripts/buildinfo_base.sh +set -x + [ -d $DIFF_VERSION_PATH ] && rm -rf $DIFF_VERSION_PATH mkdir -p $DIFF_VERSION_PATH mkdir -p $BUILD_VERSION_PATH mkdir -p $LOG_PATH [ -d $PRE_VERSION_PATH ] && rm -rf $PRE_VERSION_PATH -[ -d $POST_VERSION_PATH ] && rm -rf $POST_VERSION_PATH -mkdir -p $POST_VERSION_PATH collect_version_files $PRE_VERSION_PATH +set +x update_version_files +set -x symlink_build_hooks -set_reproducible_mirrors + +mkdir -p /var/cache/apt/archives/ +mkdir -p ${PKG_CACHE_PATH}/deb/ +mkdir -p ${PKG_CACHE_PATH}/pip/ +mkdir -p ${PKG_CACHE_PATH}/web/ +mkdir -p ${PKG_CACHE_PATH}/git/ +mkdir -p ${PKG_CACHE_PATH}/go/ +mkdir -p ${PKG_CACHE_PATH}/docker/ +mkdir -p ${PKG_CACHE_PATH}/.cache/ +mkdir -p ${PKG_CACHE_PATH}/apt + +if [ ! 
-z "$(get_version_cache_option)" ]; then + # Skip the deletion of cache files + cat <<-EOF >/etc/apt/apt.conf.d/docker-clean + DPkg::Post-Invoke { "test -f /usr/bin/rsync && rsync -avzh --ignore-errors /var/cache/apt/archives/ ${PKG_CACHE_PATH}/deb/; rm -f /var/cache/apt/archives/partial/*.deb /var/cache/apt/*.bin || true"; }; + APT::Update::Post-Invoke { "test -f /usr/bin/rsync && rsync -avzh --ignore-errors /var/cache/apt/archives/ ${PKG_CACHE_PATH}/deb/; rm -f /var/cache/apt/archives/partial/*.deb /var/cache/apt/*.bin || true"; }; + APT::Keep-Downloaded-Packages "true"; + EOF + if [ -f ${PKG_CACHE_FILE_NAME} ]; then + tar -C ${PKG_CACHE_PATH} -xvf ${PKG_CACHE_FILE_NAME} + test -e ${PKG_CACHE_PATH}/deb && cp ${PKG_CACHE_PATH}/deb/* /var/cache/apt/archives/ + mkdir -p /var/lib/apt/lists/; chmod 755 -R /var/lib/apt/lists/ + test -e ${PKG_CACHE_PATH}/apt && cp ${PKG_CACHE_PATH}/apt/* /var/lib/apt/lists/ + cp -r ${PKG_CACHE_PATH}/.cache ~/. + fi +fi chmod -R a+rw $BUILDINFO_PATH @@ -21,4 +50,6 @@ if [ "$ENABLE_VERSION_CONTROL_DEB" == "y" ] && [ -f $VERSION_DEB_PREFERENCE ]; t cp -f $VERSION_DEB_PREFERENCE /etc/apt/preferences.d/ fi +DISTRO=${DISTRO} apt-get update && apt-get install -y rsync + exit 0 diff --git a/src/sonic-build-hooks/scripts/utils.sh b/src/sonic-build-hooks/scripts/utils.sh new file mode 100644 index 000000000000..de5653253b17 --- /dev/null +++ b/src/sonic-build-hooks/scripts/utils.sh @@ -0,0 +1,37 @@ +#!/bin/bash + +# Lock macro for shared file access + # Parameters: +# $(1) - Lock file name +# $(2) - Timeout value +function FLOCK() +{ + local filename=$(dirname $1) + local timeout=${2:-360} + if [[ ! -f ${1}.flock ]]; then + touch ${1}.flock + chmod -f 777 ${1}.flock; + fi + local lockname=$(basename ${1}) + local lock_fd=lock_${lockname//[%.\/\-+~]/_}_fd + eval $(echo exec {${lock_fd}}\<\>"${1}.flock") + #echo ${!lock_fd} + if ! 
flock -x -w ${timeout} "${!lock_fd}" ; then + echo "ERROR: Lock timeout trying to access ${1}.flock"; + exit 1; + fi + #echo "Lock acquired .." +} + +# UnLock macro for shared file access +# Parameters: +# $(1) - Lock file name +function FUNLOCK() +{ + local filename=$(dirname $1) + local lockname=$(basename ${1}) + local lock_fd=lock_${lockname//[%.\/\-+~]/_}_fd + eval $(echo exec "${!lock_fd}<&-") + #rm -f ${1}.flock +} +