From 1bd1e9cd41730d1643c80d0cfe4c99ca13fb110c Mon Sep 17 00:00:00 2001
From: Jacky Romano
Date: Sun, 6 Mar 2022 11:22:44 +0000
Subject: [PATCH] CI flow fixes

This patch includes several build fixes:
- Add missing xiphos-tools/table_info files
- CI build flow rework (described below)

The CI build flow has been modified and now consists of 3 stages:
- **build_docker**: Builds a container image that includes all the build
  dependencies. Among those dependencies, the image includes the Neuroblade
  software release, which is downloaded from Intel's internal Artifactory
  (ubit). The resulting image is pushed to the local Docker registry running
  on dbio-dev-build1:5000 for use by the next stages.
- **build**: Builds the software. The resulting artifacts are generated into
  the /artifacts directory and uploaded as a GitHub Actions artifact to be
  used by later stages.
- **test**: Runs on a self-hosted runner with Xiphos HW. This stage downloads
  the artifacts from the previous stage and runs tests on them. Currently
  there are two sets of tests:
  - xiphos-datasource tests
  - native-layer tests run via the plan_tester utility
---
 .github/workflows/compile_and_test.yml      | 35 +++++++--
 README.md                                   |  1 -
 tools/build_arrow.sh                        |  2 +-
 tools/build_gazelle_jni.sh                  | 13 +++-
 tools/dockers/build_docker/Dockerfile       | 41 ++++++-----
 tools/dockers/build_docker/environment      |  4 +
 tools/dockers/build_docker/neuroblade.list  |  2 +-
 tools/dockers/gen_build_docker.sh           |  4 +-
 tools/patches/.hold                         |  0
 tools/run_tests.sh                          |  8 +-
 .../resources/config.yaml                   | 16 ++++
 .../xiphos-datasource/cpp/CMakeLists.txt    |  7 +-
 .../xiphos-datasource/cpp/src/xiphosJNI.cpp |  8 +-
 .../xiphos-tools/CMakeLists.txt             |  8 ++
 .../xiphos-tools/makefile                   |  6 ++
 .../xiphos-tools/table_info/CMakeLists.txt  | 24 ++++++
 .../xiphos-tools/table_info/table_info.cpp  | 73 +++++++++++++++++++
 17 files changed, 220 insertions(+), 32 deletions(-)
 create mode 100644 tools/dockers/build_docker/environment
 create mode 100644 tools/patches/.hold
 create mode 100644 xiphos-spark-integration/resources/config.yaml
 create mode 100644 xiphos-spark-integration/xiphos-tools/CMakeLists.txt
 create mode 100644 xiphos-spark-integration/xiphos-tools/makefile
 create mode 100644 xiphos-spark-integration/xiphos-tools/table_info/CMakeLists.txt
 create mode 100644 xiphos-spark-integration/xiphos-tools/table_info/table_info.cpp

diff --git a/.github/workflows/compile_and_test.yml b/.github/workflows/compile_and_test.yml
index ffd4a7e195fe..398a63a5baf1 100644
--- a/.github/workflows/compile_and_test.yml
+++ b/.github/workflows/compile_and_test.yml
@@ -7,7 +7,9 @@ on:
 
 jobs:
   build_docker:
-    runs-on: [self-hosted]
+    runs-on: [self-hosted, docker]
+    env:
+      ARTIFACTORY_SERVER: localhost:5000
     outputs:
       image_name: localhost:5000/gazelle-jni-ci-image:latest
     container:
@@ -17,14 +19,18 @@ jobs:
     steps:
       - name: Check out repository code
         uses: actions/checkout@v2
-      - run: docker build -t localhost:5000/gazelle-jni-ci-image:latest tools/dockers/build_docker
-      - run: docker push localhost:5000/gazelle-jni-ci-image:latest
+      #- run: echo ${{secrets.ARTIFACTORY_REGISTRY_TOKEN}} | docker login --username query_dev --password-stdin $ARTIFACTORY_SERVER
+      - run: docker build -t ${ARTIFACTORY_SERVER}/gazelle-jni-ci-image:latest tools/dockers/build_docker
+      - run: docker push ${ARTIFACTORY_SERVER}/gazelle-jni-ci-image:latest
 
   build:
-    runs-on: [self-hosted]
+    runs-on: [self-hosted, docker]
     needs: build_docker
     container:
       image: ${{needs.build_docker.outputs.image_name}}
+      credentials:
+        username: query_dev
+        password: ${{secrets.ARTIFACTORY_REGISTRY_TOKEN}}
     env:
       WORKSPACE : ${{github.workspace}}
     steps:
@@ -33,5 +39,24 @@ jobs: - uses: jfrog/setup-jfrog-cli@v2 - run: jfrog --version - run: tools/build_gazelle_jni.sh --batch + - name: Archive images + uses: actions/upload-artifact@v3 + with: + name: "xiphos-artifacts" + path: artifacts + test: + runs-on: [self-hosted, xiphos-hw] + needs: build + env: + WORKSPACE : ${{github.workspace}} + steps: + - name: Check out repository code + uses: actions/checkout@v2 + - run: ls -l + - uses: actions/download-artifact@v3 + name: Download artifcts + with: + name: "xiphos-artifacts" + path: artifacts + - run: ls -l artifacts - run: tools/run_tests.sh - diff --git a/README.md b/README.md index 9b670d4cf32e..d9919bf32664 100644 --- a/README.md +++ b/README.md @@ -81,4 +81,3 @@ After Gazelle-Jni being successfully deployed in your environment, if you would # Contact rui.mo@intel.com; binwei.yang@intel.com -xxx diff --git a/tools/build_arrow.sh b/tools/build_arrow.sh index 1a7eb05568c7..12d055be496c 100755 --- a/tools/build_arrow.sh +++ b/tools/build_arrow.sh @@ -94,7 +94,7 @@ if [ $BUILD_ARROW == "ON" ]; then -DCMAKE_INSTALL_PREFIX=${ARROW_INSTALL_DIR} \ -DCMAKE_INSTALL_LIBDIR=lib - make -j$NPROC + make -j$NPROC make install cd java diff --git a/tools/build_gazelle_jni.sh b/tools/build_gazelle_jni.sh index c1288b63e574..c85d7b2285bb 100755 --- a/tools/build_gazelle_jni.sh +++ b/tools/build_gazelle_jni.sh @@ -34,5 +34,16 @@ done pushd $root -mvn $DO_CLEAN package $BATCH_MODE -P full-scala-compiler -Dbuild_arrow=${BUILD_ARROW} -Dbuild_cpp=ON -Dclean_cpp=${CLEAN_CPP} -DskipTests -Dcheckstyle.skip +if [ "$DO_CLEAN" == "clean" ]; then + mvn clean $BATCH_MODE -P full-scala-compiler -Dbuild_arrow=${BUILD_ARROW} -Dbuild_cpp=ON -Dclean_cpp=${CLEAN_CPP} -DskipTests -Dcheckstyle.skip +fi + +mvn package $BATCH_MODE -P full-scala-compiler -Dbuild_arrow=${BUILD_ARROW} -Dbuild_cpp=ON -Dclean_cpp=${CLEAN_CPP} -DskipTests -Dcheckstyle.skip + +artifacts_dir=artifacts +mkdir -p $artifacts_dir +cp jvm/target/*.jar $artifacts_dir +cp cpp/build/src/plan_tester/plan_tester $artifacts_dir +cp -r tests $artifacts_dir +cp xiphos-spark-integration/resources/config.yaml $artifacts_dir popd diff --git a/tools/dockers/build_docker/Dockerfile b/tools/dockers/build_docker/Dockerfile index 69067b71f26c..57ab8dd32515 100644 --- a/tools/dockers/build_docker/Dockerfile +++ b/tools/dockers/build_docker/Dockerfile @@ -10,22 +10,23 @@ ENV MAVEN_OPTS="-Dhttp.proxyHost=proxy-chain.intel.com -Dhttp.proxyPort=911 -Dht RUN echo 'Acquire::http::Proxy "http://proxy-dmz.intel.com:911/"; Acquire::https::Proxy "http://proxy-dmz.intel.com:912/";' > /etc/apt/apt.conf.d/proxy # Update the base image OS and install wget and python -RUN apt-get update -RUN apt-get install apt-utils -RUN apt-get install -y wget python less numactl sudo vim -RUN apt-get install -y openjdk-8-jdk -RUN apt-get install -y maven llvm-dev llvm-runtime clang clang-format -RUN apt-get install -y build-essential -RUN apt-get install -y git -RUN apt-get install -y cmake -RUN apt-get install -y libssl-dev libcurl4 libcurl4-openssl-dev zlib1g-dev zlib1g curl -RUN apt-get install -y libboost-dev -RUN apt-get install -y tig -RUN apt-get install -y ccache -RUN apt-get install -y htop -RUN apt-get install -y ctags -RUN apt-get install -y bash-completion -RUN apt-get install -y software-properties-common +RUN DEBIAN_FRONTEND=noninteractive apt-get update +RUN DEBIAN_FRONTEND=noninteractive apt-get install apt-utils +RUN DEBIAN_FRONTEND=noninteractive apt-get install -y wget python less numactl sudo vim +RUN DEBIAN_FRONTEND=noninteractive 
apt-get install -y openjdk-8-jdk +RUN DEBIAN_FRONTEND=noninteractive apt-get install -y maven llvm-dev llvm-runtime clang clang-format +RUN DEBIAN_FRONTEND=noninteractive apt-get install -y build-essential +RUN DEBIAN_FRONTEND=noninteractive apt-get install -y git +RUN DEBIAN_FRONTEND=noninteractive apt-get install -y cmake +RUN DEBIAN_FRONTEND=noninteractive apt-get install -y libssl-dev libcurl4 libcurl4-openssl-dev zlib1g-dev zlib1g curl +RUN DEBIAN_FRONTEND=noninteractive apt-get install -y libboost-dev +RUN DEBIAN_FRONTEND=noninteractive apt-get install -y tig +RUN DEBIAN_FRONTEND=noninteractive apt-get install -y ccache +RUN DEBIAN_FRONTEND=noninteractive apt-get install -y htop +RUN DEBIAN_FRONTEND=noninteractive apt-get install -y ctags +RUN DEBIAN_FRONTEND=noninteractive apt-get install -y bash-completion +RUN DEBIAN_FRONTEND=noninteractive apt-get install -y software-properties-common +RUN DEBIAN_FRONTEND=noninteractive apt-get install -y software-properties-common RUN echo "installating NB dependencies" RUN wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | gpg --dearmor - | sudo tee /usr/share/keyrings/kitware-archive-keyring.gpg >/dev/null @@ -40,10 +41,16 @@ RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 100 --slave / RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-9 90 --slave /usr/bin/g++ g++ /usr/bin/g++-9 --slave /usr/bin/gcov gcov /usr/bin/gcov-9 RUN update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-8 80 --slave /usr/bin/g++ g++ /usr/bin/g++-8 --slave /usr/bin/gcov gcov /usr/bin/gcov-8 # ignore installation errors for now -RUN apt install -y nb-release || : +RUN DEBIAN_FRONTEND=noninteractive apt-get install -y linux-headers-5.8.0-43-generic +# install neuroblade release. 
Change the pointer to use other releases
+RUN curl -uquery_dev:AP3pvecE8pWZbwtNuC7DFXBsuRif3Wi3GWvMurPZUfKZaxTcxUXZ1vvMuVuM -O "https://ubit-artifactory-il.intel.com/artifactory/queryspark-il-local/nb-releases/220315-c/release.tgz"
+RUN mkdir nb-release
+RUN tar zxf release.tgz -C nb-release
+RUN DEBIAN_FRONTEND=noninteractive nb-release/install.sh
 
 # Enable no passwd for sudo
 COPY sudoers /etc/sudoers
+COPY environment /etc/environment
 
 # select java version
 ENV JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
diff --git a/tools/dockers/build_docker/environment b/tools/dockers/build_docker/environment
new file mode 100644
index 000000000000..f4e060bc12eb
--- /dev/null
+++ b/tools/dockers/build_docker/environment
@@ -0,0 +1,4 @@
+PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/usr/local/games:/snap/bin"
+http_proxy=http://proxy.iil.intel.com:911
+https_proxy=http://proxy.iil.intel.com:912
+no_proxy=intel.com,.intel.com,localhost
diff --git a/tools/dockers/build_docker/neuroblade.list b/tools/dockers/build_docker/neuroblade.list
index b9c71d0865b7..06e611f864e9 100644
--- a/tools/dockers/build_docker/neuroblade.list
+++ b/tools/dockers/build_docker/neuroblade.list
@@ -1,3 +1,3 @@
 deb [signed-by=/usr/share/keyrings/kitware-archive-keyring.gpg] https://apt.kitware.com/ubuntu/ focal main
 deb http://apt.llvm.org/focal/ llvm-toolchain-focal-12 main
-deb https://download.neuroblade.ai/debian/ focal main
+deb https://download.neuroblade.ai/debian/ focal 1.5
diff --git a/tools/dockers/gen_build_docker.sh b/tools/dockers/gen_build_docker.sh
index 6f249e56f060..159d8ef37a83 100755
--- a/tools/dockers/gen_build_docker.sh
+++ b/tools/dockers/gen_build_docker.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 
 mydir=$(realpath $(dirname $0))
 
-docker build -t dbio-runner-vm1.iil.intel.com:5000/gazelle-jni-ci-image:latest $mydir/build_docker
-docker push dbio-runner-vm1.iil.intel.com:5000/gazelle-jni-ci-image:latest
+docker build -t dbio-dev-build1.iil.intel.com:5000/gazelle-jni-ci-image:latest $mydir/build_docker
+docker push dbio-dev-build1.iil.intel.com:5000/gazelle-jni-ci-image:latest
diff --git a/tools/patches/.hold b/tools/patches/.hold
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/tools/run_tests.sh b/tools/run_tests.sh
index 6df7ca1c7576..fc4d9eed1ce9 100755
--- a/tools/run_tests.sh
+++ b/tools/run_tests.sh
@@ -2,9 +2,14 @@
 
 set -e
 
+
+export MAVEN_OPTS="-Dhttp.proxyHost=proxy-chain.intel.com -Dhttp.proxyPort=911 -Dhttp.nonProxyHosts=localhost|*.intel.com -Dhttps.proxyHost=proxy-chain.intel.com -Dhttps.proxyPort=912"
+
 mydir=$(realpath $(dirname $0))
 root=$mydir/..
 
+export DAXL_CONFIG_FILE=$root/artifacts/config.yaml + function test_xiphos_datasource () { cd $root/xiphos-spark-integration/xiphos-datasource mvn test @@ -14,7 +19,8 @@ function test_xiphos_datasource () { test_xiphos_datasource # basic plan_tester test to ensure that it doesn't crash on simple test -$root/cpp/build/src/plan_tester/plan_tester $root/tests/plan1.substrait +$root/artifacts/plan_tester $root/tests/plan1.substrait +# todo - add end-to-end gazelle tests echo "Tests PASSED" exit 0 diff --git a/xiphos-spark-integration/resources/config.yaml b/xiphos-spark-integration/resources/config.yaml new file mode 100644 index 000000000000..335344256d19 --- /dev/null +++ b/xiphos-spark-integration/resources/config.yaml @@ -0,0 +1,16 @@ +{ + se : { + host : localhost, + port : 50052 + }, + + planner : { + host : localhost, + port : 6568 + }, + + master : { + host : localhost, + port : 6568 + } +} \ No newline at end of file diff --git a/xiphos-spark-integration/xiphos-datasource/cpp/CMakeLists.txt b/xiphos-spark-integration/xiphos-datasource/cpp/CMakeLists.txt index 447c72840241..4756c699646d 100644 --- a/xiphos-spark-integration/xiphos-datasource/cpp/CMakeLists.txt +++ b/xiphos-spark-integration/xiphos-datasource/cpp/CMakeLists.txt @@ -9,8 +9,11 @@ set (JNI_HEADER ${JNI_HEADER_DIR}/com_intel_dbio_sources_datasourcev2_xiphosv2_X set (JNI_SOURCE ${CMAKE_CURRENT_SOURCE_DIR}/../src/main/java/com/intel/dbio/sources/datasourcev2/xiphosv2/XiphosJniImp.java) set (TARGET_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../target) -find_package(Arrow CONFIG REQUIRED) -message(STATUS "Using arrow ${ARROW_VERSION}") +#find_package(Arrow CONFIG REQUIRED) +#message(STATUS "Using arrow ${ARROW_VERSION}") + +include_directories(/usr/neuroblade/nb-daxl/include) +link_directories(/usr/neuroblade/nb-arrow/lib /usr/neuroblade/nb-daxl/lib) set(ARROW_LIBRARIES arrow arrow_flight) find_package(JNI) diff --git a/xiphos-spark-integration/xiphos-datasource/cpp/src/xiphosJNI.cpp b/xiphos-spark-integration/xiphos-datasource/cpp/src/xiphosJNI.cpp index db823c80195d..7e74e2175a2d 100644 --- a/xiphos-spark-integration/xiphos-datasource/cpp/src/xiphosJNI.cpp +++ b/xiphos-spark-integration/xiphos-datasource/cpp/src/xiphosJNI.cpp @@ -43,7 +43,13 @@ JNIEXPORT jboolean JNICALL Java_com_intel_dbio_sources_datasourcev2_xiphosv2_Xip { std::cout << "JNI init\n"; - daxl::Daxl::getInstance()->init(); + if (getenv("DAXL_CONFIG_FILE") == nullptr) { + std::cerr << "ERROR: DAXL_CONFIG_FILE env variable is not defined !!" << std::endl; + return false; + } + std::string configPath(getenv("DAXL_CONFIG_FILE")); + + daxl::Daxl::getInstance()->init(configPath, daxl::Role::WORKER); if (verbose) { std::cout << "DAXL initialized\n"; } diff --git a/xiphos-spark-integration/xiphos-tools/CMakeLists.txt b/xiphos-spark-integration/xiphos-tools/CMakeLists.txt new file mode 100644 index 000000000000..3794f36e9df9 --- /dev/null +++ b/xiphos-spark-integration/xiphos-tools/CMakeLists.txt @@ -0,0 +1,8 @@ +cmake_minimum_required(VERSION 3.12.4) + +project(xiphos-tools) + +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + +add_subdirectory(table_info) + diff --git a/xiphos-spark-integration/xiphos-tools/makefile b/xiphos-spark-integration/xiphos-tools/makefile new file mode 100644 index 000000000000..a761b6022dac --- /dev/null +++ b/xiphos-spark-integration/xiphos-tools/makefile @@ -0,0 +1,6 @@ +all: + mkdir -p build && cd build && cmake .. -DCMAKE_BUILD_TYPE=RelWithDebInfo && cd .. 
+ $(MAKE) -C build $@ + +%: + $(MAKE) -C build $@ diff --git a/xiphos-spark-integration/xiphos-tools/table_info/CMakeLists.txt b/xiphos-spark-integration/xiphos-tools/table_info/CMakeLists.txt new file mode 100644 index 000000000000..808a0dc6fd61 --- /dev/null +++ b/xiphos-spark-integration/xiphos-tools/table_info/CMakeLists.txt @@ -0,0 +1,24 @@ +cmake_minimum_required(VERSION 3.12.4) + +project( + neuroblade-daxl-samples + VERSION 0.0.1 + DESCRIPTION "neuroblade daxl samples") + +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + +# language - defaults +set(CXX /usr/lib/clang++-12) +set(CMAKE_CXX_STANDARD 20) +set(CMAKE_CXX_STANDARD_REQUIRED ON) +set(CMAKE_CXX_EXTENSIONS OFF) + +set(ARROW_LIBRARIES arrow arrow_flight) + +include_directories(/usr/neuroblade/nb-daxl/include + /usr/neuroblade/nb-arrow/include) +link_directories(/usr/neuroblade/nb-arrow/lib + /usr/neuroblade/nb-daxl/lib) + +add_executable(table_info table_info.cpp) +target_link_libraries(table_info neuroblade-daxl ${ARROW_LIBRARIES}) diff --git a/xiphos-spark-integration/xiphos-tools/table_info/table_info.cpp b/xiphos-spark-integration/xiphos-tools/table_info/table_info.cpp new file mode 100644 index 000000000000..d79f10848947 --- /dev/null +++ b/xiphos-spark-integration/xiphos-tools/table_info/table_info.cpp @@ -0,0 +1,73 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +using namespace std; + +void printTableInfo(const string & tableName) +{ + daxl::dataIO::TableManager *tableManger = daxl::dataIO::TableManager::getInstance(); + daxl::DiskTable diskTable(tableName); + + daxl::Status status = tableManger->findTable(tableName, &diskTable); + if (status.isOk()) { + cout << "OK\n"; + } else { + cout << "findTable " << tableName << " failed with << " << status.getErrorMessage() << endl; + } + + auto columns = diskTable.getColumns(); + for (auto const & c : columns) { + cout << "name: " << c.getName() << " type: " << *c.getType() << " scale: " << c.getScale() << " prec: " << c.getPrecision() << endl; + } +} + +void printTablesInfo() +{ + vector tables; + daxl::dataIO::TableManager * mgr = daxl::dataIO::TableManager::getInstance(); + + if (!mgr->getAllTables(&tables).isOk()) { + cerr << "Failed to get all tables\n"; + return; + } + + cout << "Got " << tables.size() << " tables\n"; + + for (auto const & t : tables) { + cout << t.getName() << ":\n"; + printTableInfo(t.getName()); + } + +} + +int main(int argc, char *argv[]) +{ + if (argc < 2) { + cout << "Usage: " << argv[0] << " [table name]" << endl; + exit(1); + } + + daxl::Daxl::getInstance()->init(string(argv[1]), daxl::Role::MASTER); + + if (argc < 3) { + printTablesInfo(); + } else { + printTableInfo(argv[2]); + } + + return 0; +}
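
Local testing note: the sketch below is a rough manual equivalent of the new
build and test stages, useful for checking the artifacts layout outside the
workflow. It assumes a host with Xiphos HW and the Neuroblade release
installed (as provided by the build_docker image), run from the repository
root:

    # Build stage: packages the jars and stages plan_tester, tests/ and
    # config.yaml under artifacts/ (done by the updated build script).
    tools/build_gazelle_jni.sh --batch

    # Test stage: tools/run_tests.sh exports DAXL_CONFIG_FILE itself and runs
    # the xiphos-datasource mvn tests plus this plan_tester smoke test:
    export DAXL_CONFIG_FILE=$PWD/artifacts/config.yaml
    artifacts/plan_tester tests/plan1.substrait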