Skip to content
This repository has been archived by the owner on Nov 25, 2024. It is now read-only.

Update docs #36

Merged
merged 30 commits into from
Jul 12, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
30 commits
Select commit Hold shift + click to select a range
303d444
mount /dev/shm inside container in test_cpp and enable cpp tests
dongxuy04 Jun 25, 2023
6ed5daa
Replace POSIX shm func with System V shm func to get rid of shm-size …
dongxuy04 Jun 25, 2023
5fa8ab5
added need for pytorch
dongxuy04 Jun 25, 2023
a2bcbaf
fix style
dongxuy04 Jun 25, 2023
bd78bb6
use pytorch-cuda for dependency and disable append_unique for cpp test
dongxuy04 Jun 26, 2023
ff7e41c
update dependency and log for aarch64
dongxuy04 Jun 26, 2023
e4797ad
update dependency
dongxuy04 Jun 26, 2023
885ad2a
disable graph_csr_add_self_loop_test for cpp
dongxuy04 Jun 26, 2023
8ffecb7
disable wholegraph_csr_unweighted_sample_without_replacement_tests fo…
dongxuy04 Jun 26, 2023
d02d0dd
debug aarch64 cpp test
dongxuy04 Jun 26, 2023
74289e7
debug aarch64 cpp test, enable all
dongxuy04 Jun 26, 2023
06c5aea
revert change and retest all, enable all
dongxuy04 Jun 26, 2023
964bd6e
revert change and retest all, enable all
dongxuy04 Jun 26, 2023
4bae876
debug append unique op
hello-11 Jun 26, 2023
15203c8
adjust code style
hello-11 Jun 27, 2023
fc089bb
disable python test for aarch64 for no pytorch cuda support
dongxuy04 Jun 27, 2023
9d72893
remove logs
dongxuy04 Jun 27, 2023
65e6f7f
fix comm test
dongxuy04 Jun 27, 2023
0abfe54
Merge branch 'rapidsai:refactoring' into refactoring
dongxuy04 Jul 7, 2023
f4474ef
Update docs for wholegraph
dongxuy04 Jul 9, 2023
8db8846
update dependencies.yaml, adding breathe dependency
dongxuy04 Jul 9, 2023
49264a5
debug doc build
dongxuy04 Jul 10, 2023
5a66a05
debug doc build
dongxuy04 Jul 11, 2023
170f8d5
debug doc build
dongxuy04 Jul 11, 2023
78a1567
debug docs build, comment out
dongxuy04 Jul 11, 2023
1e95757
debug docs build, module name
dongxuy04 Jul 11, 2023
6d46f0a
debug docs build, add autodoc_mock_imports
dongxuy04 Jul 11, 2023
80a1410
fix docs build by adding libcuda.so.1 dependencies to autodoc_mock_im…
dongxuy04 Jul 11, 2023
0d70e0a
add docs upload
dongxuy04 Jul 11, 2023
80694b9
fix ./build.sh docs
dongxuy04 Jul 12, 2023
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
FROM nvcr.io/nvidia/pytorch:22.10-py3

RUN apt update && DEBIAN_FRONTEND=noninteractive apt install -y lsb-core software-properties-common wget libspdlog-dev

# Remove the old cmake (the conda-provided copy and any system-installed
# remnants) so the newer Kitware apt package installed below is the only
# cmake left on PATH.
RUN conda remove --force -y cmake
RUN rm -rf /usr/local/bin/cmake && rm -rf /usr/local/lib/cmake && rm -rf /usr/lib/cmake

# Install an up-to-date cmake from the Kitware apt repository for this
# image's Ubuntu release (codename queried via lsb_release).
RUN apt-key adv --fetch-keys https://apt.kitware.com/keys/kitware-archive-latest.asc && \
export LSB_CODENAME=$(lsb_release -cs) && \
apt-add-repository -y "deb https://apt.kitware.com/ubuntu/ ${LSB_CODENAME} main" && \
apt update && apt install -y cmake

# Update the `py` package, which pytest depends on.
RUN pip3 install -U py
# Python build/test tooling used by this project's build and test scripts.
RUN pip3 install Cython setuputils3 scikit-build nanobind pytest-forked pytest
25 changes: 19 additions & 6 deletions build.sh
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ HELP="$0 [<target> ...] [<flag> ...]

libwholegraph build dir is: ${LIBWHOLEGRAPH_BUILD_DIR}

Set env var LIBWHOLEGRAPH_BUILD_DIR to override libcugraph build dir.
Set env var LIBWHOLEGRAPH_BUILD_DIR to override libwholegraph build dir.
"
LIBWHOLEGRAPH_BUILD_DIR=${LIBWHOLEGRAPH_BUILD_DIR:=${REPODIR}/cpp/build}

Expand All @@ -83,6 +83,7 @@ BUILD_TYPE=Release
BUILD_ALL_GPU_ARCH=0
INSTALL_TARGET="--target install"
PYTHON=${PYTHON:-python}
DOCS_BUILD_DIR=build

# Set defaults for vars that may not have been defined externally
# FIXME: if INSTALL_PREFIX is not set, check PREFIX, then check
Expand All @@ -103,7 +104,7 @@ function buildAll {

function cleanPythonDir {
pushd $1 > /dev/null
rm -rf dist wholegraph/raft *.egg-info
rm -rf dist *.egg-info
find . -type d -name __pycache__ -print | xargs rm -rf
find . -type d -name _skbuild -print | xargs rm -rf
find . -type d -name _external_repositories -print | xargs rm -rf
Expand Down Expand Up @@ -193,8 +194,8 @@ if hasArg clean; then
# FIXME: ideally the "setup.py clean" command would be used for this, but
# currently running any setup.py command has side effects (eg. cloning repos).
# (cd ${REPODIR}/python && python setup.py clean)
if [[ -d ${REPODIR}/python ]]; then
cleanPythonDir ${REPODIR}/python
if [[ -d ${REPODIR}/python/pylibwholegraph ]]; then
cleanPythonDir ${REPODIR}/python/pylibwholegraph
fi

# If the dirs to clean are mounted dirs in a container, the
Expand All @@ -209,6 +210,12 @@ if hasArg clean; then
done
# remove any left-over cpython shared libraries
find ${REPODIR}/python/pylibwholegraph -name "*.cpython*.so" -type f -delete

# remove docs build
cd ${REPODIR}/docs/wholegraph
make BUILDDIR=${DOCS_BUILD_DIR} clean
rm -rf ${REPODIR}/docs/wholegraph/_xml
rm -rf ${REPODIR}/docs/wholegraph/_html
fi

if hasArg tests; then
Expand Down Expand Up @@ -297,7 +304,13 @@ if hasArg docs; then
${CMAKE_VERBOSE_OPTION}
fi
cd ${LIBWHOLEGRAPH_BUILD_DIR}
cmake --build "${LIBWHOLEGRAPH_BUILD_DIR}" -j${PARALLEL_LEVEL} --target docs_wholegraph ${VERBOSE_FLAG}
cmake --build "${LIBWHOLEGRAPH_BUILD_DIR}" -j${PARALLEL_LEVEL} --target doxygen ${VERBOSE_FLAG}
mkdir -p ${REPODIR}/docs/wholegraph/_html/doxygen_docs/libwholegraph/html
mv ${LIBWHOLEGRAPH_BUILD_DIR}/html/* ${REPODIR}/docs/wholegraph/_html/doxygen_docs/libwholegraph/html
mkdir -p ${REPODIR}/docs/wholegraph/_xml
# _xml is used for sphinx breathe project
mv ${LIBWHOLEGRAPH_BUILD_DIR}/xml/* "${REPODIR}/docs/wholegraph/_xml"
cd ${REPODIR}/docs/wholegraph
make html
PYTHONPATH=${REPODIR}/python/pylibwholegraph:${PYTHONPATH} make BUILDDIR=${DOCS_BUILD_DIR} html
mv ${REPODIR}/docs/wholegraph/_html/doxygen_docs ${REPODIR}/docs/wholegraph/${DOCS_BUILD_DIR}/html/
fi
49 changes: 28 additions & 21 deletions ci/build_docs.sh
Original file line number Diff line number Diff line change
Expand Up @@ -19,31 +19,38 @@ set -u

rapids-print-env

#rapids-logger "Downloading artifacts from previous jobs"
#
#CPP_CHANNEL=$(rapids-download-conda-from-s3 cpp)
#PYTHON_CHANNEL=$(rapids-download-conda-from-s3 python)
#VERSION_NUMBER="23.06"
rapids-logger "Downloading artifacts from previous jobs"

#rapids-mamba-retry install \
# --channel "${CPP_CHANNEL}" \
# --channel "${PYTHON_CHANNEL}" \
#libwholegraph pylibwholegraph
CPP_CHANNEL=$(rapids-download-conda-from-s3 cpp)
PYTHON_CHANNEL=$(rapids-download-conda-from-s3 python)
VERSION_NUMBER="23.08"
export RAPIDS_VERSION_NUMBER=${VERSION_NUMBER}
export RAPIDS_DOCS_DIR="$(mktemp -d)"

rapids-mamba-retry install \
--channel "${CPP_CHANNEL}" \
--channel "${PYTHON_CHANNEL}" \
libwholegraph pylibwholegraph numpy

rapids-logger "Build Doxygen docs"
pushd cpp
doxygen Doxyfile
mkdir -p ../docs/wholegraph/_html/doxygen_docs/libwholegraph/html
mv html/* ../docs/wholegraph/_html/doxygen_docs/libwholegraph/html
mkdir -p ../docs/wholegraph/_xml
# _xml is used for sphinx breathe project
mv xml/* "../docs/wholegraph/_xml"
popd

rapids-logger "Build Sphinx docs"
pushd docs/wholegraph
sphinx-build -b dirhtml ./source _html
sphinx-build -b text ./source _text
mkdir -p "${RAPIDS_DOCS_DIR}/wholegraph/"{html,txt}
mv _html/* "${RAPIDS_DOCS_DIR}/wholegraph/html"
mv _text/* "${RAPIDS_DOCS_DIR}/wholegraph/txt"
popd

#rapids-logger "Build Sphinx docs"
#pushd docs
#sphinx-build -b dirhtml ./source html -W
#sphinx-build -b text ./source text -W
#popd
#
#if [[ "${RAPIDS_BUILD_TYPE}" == "branch" ]]; then
# rapids-logger "Upload Docs to S3"
# aws s3 sync --no-progress --delete cpp/html "s3://rapidsai-docs/libwholegraph/${VERSION_NUMBER}/html"
# aws s3 sync --no-progress --delete docs/html "s3://rapidsai-docs/pylibwholegraph/${VERSION_NUMBER}/html"
# aws s3 sync --no-progress --delete docs/text "s3://rapidsai-docs/pylibwholegraph/${VERSION_NUMBER}/txt"
#fi
rapids-logger "Output temp dir: ${RAPIDS_DOCS_DIR}"

rapids-upload-docs
1 change: 1 addition & 0 deletions conda/environments/all_cuda-118_arch-x86_64.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ channels:
- conda-forge
- nvidia
dependencies:
- breathe
- c-compiler
- clang-tools=16.0.0
- clangxx=16.0.0
Expand Down
31 changes: 10 additions & 21 deletions cpp/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -194,17 +194,6 @@ if(BUILD_TESTS AND CMAKE_PROJECT_NAME STREQUAL PROJECT_NAME)
add_subdirectory(tests)
endif()

##############################################################################
# - doxygen docs -------------------------------------------------------------

include(./cmake/doxygen.cmake)
add_doxygen_target(
IN_DOXYFILE Doxyfile.in
OUT_DOXYFILE ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile
OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/html
CWD ${CMAKE_CURRENT_BINARY_DIR}
DEP_TARGETS ${DOXYGEN_DEP_TARGETS})

##############################################################################
# - code checker -------------------------------------------------------------

Expand Down Expand Up @@ -286,15 +275,15 @@ rapids_export(
################################################################################
# - make documentation ---------------------------------------------------------
# requires doxygen and graphviz to be installed
# from build directory, run make docs_wholegraph
# from build directory, run make doxygen

# doc targets for cugraph
find_package(Doxygen 1.8.11)
if(Doxygen_FOUND)
add_custom_command(OUTPUT WHOLEGRAPH_DOXYGEN
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/doxygen
COMMAND doxygen Doxyfile
VERBATIM)
##############################################################################
# - doxygen docs -------------------------------------------------------------

add_custom_target(docs_wholegraph DEPENDS WHOLEGRAPH_DOXYGEN)
endif()
include(./cmake/doxygen.cmake)
add_doxygen_target(
IN_DOXYFILE Doxyfile.in
OUT_DOXYFILE ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile
OUT_DIR ${CMAKE_CURRENT_BINARY_DIR}/html
CWD ${CMAKE_CURRENT_BINARY_DIR}
DEP_TARGETS ${DOXYGEN_DEP_TARGETS})
4 changes: 2 additions & 2 deletions cpp/Doxyfile
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ PROJECT_NAME = "WholeGraph C API"
# could be handy for archiving the generated documentation or if some version
# control system is used.

PROJECT_NUMBER = 23.06
PROJECT_NUMBER = 23.08

# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
Expand Down Expand Up @@ -1988,7 +1988,7 @@ MAN_LINKS = NO
# captures the structure of the code including all documentation.
# The default value is: NO.

GENERATE_XML = NO
GENERATE_XML = YES

# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a
# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
Expand Down
2 changes: 1 addition & 1 deletion cpp/Doxyfile.in
Original file line number Diff line number Diff line change
Expand Up @@ -1988,7 +1988,7 @@ MAN_LINKS = NO
# captures the structure of the code including all documentation.
# The default value is: NO.

GENERATE_XML = NO
GENERATE_XML = YES

# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a
# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
Expand Down
39 changes: 31 additions & 8 deletions cpp/include/wholememory/embedding.h
Original file line number Diff line number Diff line change
Expand Up @@ -22,24 +22,47 @@
extern "C" {
#endif

/**
* @brief Opaque handle to WholeMemory Embedding Cache Policy
*
 * An opaque handle to WholeMemory Embedding Cache Policy
*/
typedef struct wholememory_embedding_cache_policy_* wholememory_embedding_cache_policy_t;

/**
* @brief Opaque handle to WholeMemory Embedding Optimizer
*
 * An opaque handle to WholeMemory Embedding Optimizer
*/
typedef struct wholememory_embedding_optimizer_* wholememory_embedding_optimizer_t;

/**
* @brief Opaque handle to WholeMemory Embedding
*
 * An opaque handle to WholeMemory Embedding
*/
typedef struct wholememory_embedding_* wholememory_embedding_t;

/**
* @enum wholememory_access_type_t
* @brief defines access type of WholeMemory Embedding
*/
enum wholememory_access_type_t {
WHOLEMEMORY_AT_NONE = 0,
WHOLEMEMORY_AT_READONLY,
WHOLEMEMORY_AT_READWRITE,
WHOLEMEMORY_AT_NONE = 0, /*!< Not defined */
WHOLEMEMORY_AT_READONLY, /*!< Only have readonly access to the WholeMemory */
WHOLEMEMORY_AT_READWRITE, /*!< May have write access to the WholeMemory */
};

/**
* @enum wholememory_optimizer_type_t
* @brief defines optimizer type for WholeMemory Embedding
*/
enum wholememory_optimizer_type_t {
WHOLEMEMORY_OPT_NONE = 0,
WHOLEMEMORY_OPT_SGD,
WHOLEMEMORY_OPT_LAZY_ADAM,
WHOLEMEMORY_OPT_RMSPROP,
WHOLEMEMORY_OPT_ADAGRAD,
WHOLEMEMORY_OPT_NONE = 0, /*!< No optimizer needed */
WHOLEMEMORY_OPT_SGD, /*!< Use SGD optimizer */
WHOLEMEMORY_OPT_LAZY_ADAM, /*!< Use Lazy Adam optimizer */
WHOLEMEMORY_OPT_RMSPROP, /*!< Use RMSProp optimizer */
WHOLEMEMORY_OPT_ADAGRAD, /*!< Use AdaGrad optimizer */
};

/**
Expand Down
3 changes: 3 additions & 0 deletions cpp/include/wholememory/global_reference.h
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,9 @@ extern "C" {

/**
* @brief Global reference of a WholeMemory object
*
 * A global reference is for Continuous or Chunked WholeMemory types; in these types, each rank can
 * directly access all memory from all ranks. The global reference is used to do this direct access.
*/
struct wholememory_gref_t {
void* pointer; /*!< pointer to data for CONTINUOUS WholeMemory or pointer to data pointer array
Expand Down
46 changes: 23 additions & 23 deletions cpp/include/wholememory/tensor_description.h
Original file line number Diff line number Diff line change
Expand Up @@ -27,16 +27,16 @@ extern "C" {
* @brief defines WholeMemory data type for tensors
*/
enum wholememory_dtype_t {
WHOLEMEMORY_DT_UNKNOWN = 0,
WHOLEMEMORY_DT_FLOAT,
WHOLEMEMORY_DT_HALF,
WHOLEMEMORY_DT_DOUBLE,
WHOLEMEMORY_DT_BF16,
WHOLEMEMORY_DT_INT,
WHOLEMEMORY_DT_INT64,
WHOLEMEMORY_DT_INT16,
WHOLEMEMORY_DT_INT8,
WHOLEMEMORY_DT_COUNT,
WHOLEMEMORY_DT_UNKNOWN = 0, /*!< Unknown type */
WHOLEMEMORY_DT_FLOAT, /*!< 32-bit float type */
WHOLEMEMORY_DT_HALF, /*!< 16-bit half float type */
WHOLEMEMORY_DT_DOUBLE, /*!< 64-bit double type */
WHOLEMEMORY_DT_BF16, /*!< 16-bit bfloat type */
WHOLEMEMORY_DT_INT, /*!< 32-bit signed integer type */
WHOLEMEMORY_DT_INT64, /*!< 64-bit signed integer type */
WHOLEMEMORY_DT_INT16, /*!< 16-bit signed integer type */
WHOLEMEMORY_DT_INT8, /*!< 8-bit signed integer type */
  WHOLEMEMORY_DT_COUNT,      /*!< total count of types */
};

/**
Expand Down Expand Up @@ -67,20 +67,20 @@ bool wholememory_dtype_is_integer_number(wholememory_dtype_t dtype);
* @brief wrapper for array in WholeMemory
*/
struct wholememory_array_description_t {
int64_t size;
int64_t storage_offset; /* offset in number of elements, NOT in bytes. */
wholememory_dtype_t dtype;
int64_t size; /*!< size of the array in elements. */
int64_t storage_offset; /*!< offset in number of elements, NOT in bytes. */
wholememory_dtype_t dtype; /*!< data type of the array */
};

/**
* @struct wholememory_matrix_description_t
* @brief wrapper for matrix in WholeMemory
*/
struct wholememory_matrix_description_t {
int64_t sizes[2]; /* sizes[0] is row of the matrix, sizes[1] is column of the matrix */
int64_t stride; /* stride of first dimension, in number of elements */
int64_t storage_offset; /* offset in number of elements, NOT in bytes. */
wholememory_dtype_t dtype;
int64_t sizes[2]; /*!< sizes[0] is row of the matrix, sizes[1] is column of the matrix */
int64_t stride; /*!< stride of first dimension, in number of elements */
int64_t storage_offset; /*!< offset in number of elements, NOT in bytes. */
wholememory_dtype_t dtype; /*!< data type of the matrix */
};

#define WHOLEMEMORY_MAX_TENSOR_DIM (8)
Expand All @@ -90,12 +90,12 @@ struct wholememory_matrix_description_t {
* @brief Tensor description in WholeMemory, dimension 0 is the slowest changed dimension
*/
struct wholememory_tensor_description_t {
int64_t sizes[WHOLEMEMORY_MAX_TENSOR_DIM]; /* size of each dimension of the tensor, in number of
elements */
int64_t strides[WHOLEMEMORY_MAX_TENSOR_DIM]; /* stride of the tensor, in number of elements */
int64_t storage_offset; /* offset in number of elements, NOT in bytes. */
int dim;
wholememory_dtype_t dtype;
int64_t sizes[WHOLEMEMORY_MAX_TENSOR_DIM]; /*!< size of each dimension of the tensor, in number of
elements */
int64_t strides[WHOLEMEMORY_MAX_TENSOR_DIM]; /*!< stride of the tensor, in number of elements */
int64_t storage_offset; /*!< offset in number of elements, NOT in bytes. */
int dim; /*!< dim of the tensor */
wholememory_dtype_t dtype; /*!< data type of the tensor */
};

/*!
Expand Down
Loading