diff --git a/reco_utils/__init__.py b/reco_utils/__init__.py
index 19ce5b102a..df052a7fad 100644
--- a/reco_utils/__init__.py
+++ b/reco_utils/__init__.py
@@ -2,7 +2,7 @@
 # Licensed under the MIT License.
 
 __title__ = "Microsoft Recommenders"
-__version__ = "2019.09"
+__version__ = "2019.9"
 __author__ = "RecoDev Team at Microsoft"
 __license__ = "MIT"
 __copyright__ = "Copyright 2018-present Microsoft Corporation"
diff --git a/scripts/generate_conda_file.py b/scripts/generate_conda_file.py
index d8e791918c..7f33e97b9e 100644
--- a/scripts/generate_conda_file.py
+++ b/scripts/generate_conda_file.py
@@ -45,7 +45,7 @@
     "mock": "mock==2.0.0",
     "numpy": "numpy>=1.13.3",
     "pandas": "pandas>=0.23.4,<1.0.0",
-    "pip": "pip>=19.0.3",
+    "pip": "pip>=19.2",
     "pytest": "pytest>=3.6.4",
     "pytorch": "pytorch-cpu>=1.0.0",
     "seaborn": "seaborn>=0.8.1",
@@ -84,7 +84,7 @@
     "nbconvert": "nbconvert==5.5.0",
     "pydocumentdb": "pydocumentdb>=2.3.3",
     "pymanopt": "pymanopt==0.2.3",
-    "xlearn": "xlearn==0.40a1"
+    "xlearn": "xlearn==0.40a1",
 }
 
 PIP_GPU = {"nvidia-ml-py3": "nvidia-ml-py3>=7.352.0"}
diff --git a/setup.py b/setup.py
index a603346f8f..f0d63c1c51 100644
--- a/setup.py
+++ b/setup.py
@@ -1,19 +1,26 @@
 # Copyright (c) Microsoft Corporation. All rights reserved.
 # Licensed under the MIT License.
 
-from setuptools import setup
-from os import chdir, path
+import time
+from setuptools import setup, find_packages
+from os import chdir, path, environ
 
 chdir(path.abspath(path.dirname(__file__)))
 
-VERSION = __import__("reco_utils.__init__").VERSION
+version = __import__("reco_utils.__init__").VERSION
 
 # Get the long description from the README file
 with open(path.join("reco_utils", "README.md"), encoding="utf-8") as f:
     LONG_DESCRIPTION = f.read()
 
+HASH = environ.get("HASH", None)
+if HASH is not None:
+    version += ".post" + str(int(time.time()))
+
+name = environ.get("NAME", "reco_utils")
+
 setup(
-    name="reco_utils",
-    version=VERSION,
+    name=name,
+    version=version,
     description="Recommender System Utilities",
     long_description=LONG_DESCRIPTION,
     long_description_content_type="text/markdown",
@@ -30,6 +37,8 @@
         "Programming Language :: Python :: 3.6",
     ],
     keywords="recommendations recommenders recommender system engine machine learning python spark gpu",
-    packages=["reco_utils"],
+    package_dir={"reco_utils": "reco_utils"},
+    packages=find_packages(where=".", exclude=["tests", "scripts"]),
     python_requires=">=3.6, <4",
 )
+
diff --git a/tests/ci/azure_artifact_feed.yaml b/tests/ci/azure_artifact_feed.yaml
new file mode 100644
index 0000000000..4260ae3a02
--- /dev/null
+++ b/tests/ci/azure_artifact_feed.yaml
@@ -0,0 +1,99 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+# To install the recommenders artifact (with manual login to Azure):
+# $ pip install artifacts-keyring --pre
+# Choose the version to install based on your environment:
+# $ pip install reco-utils --index-url=https://pkgs.dev.azure.com/best-practices/recommenders/_packaging/recommenders/pypi/simple/
+# $ pip install reco-utils-gpu --index-url=https://pkgs.dev.azure.com/best-practices/recommenders/_packaging/recommenders/pypi/simple/
+# $ pip install reco-utils-pyspark --index-url=https://pkgs.dev.azure.com/best-practices/recommenders/_packaging/recommenders/pypi/simple/
+
+# To install the recommenders artifact programmatically with a Personal Access Token (PAT):
+# First generate the PAT:
+# 1. Go to the Personal Access Tokens screen in ADO => User (icon in top right)
+# 2. Generate a new token, with at least Packages/Read permissions selected
+# 3. Add the PAT in the index-url=https://{feed_name}:{PAT}@pkgs.dev.azure.com/{org_name}/{project_name}/_packaging/{feed_name}/pypi/simple/
+# Install dependencies and the Recommenders library
+# $ pip install artifacts-keyring --pre
+# Choose the version to install based on your environment:
+# $ pip install reco-utils --index-url=https://recommenders:{PAT}@pkgs.dev.azure.com/best-practices/recommenders/_packaging/recommenders/pypi/simple/
+# $ pip install reco-utils-gpu --index-url=https://recommenders:{PAT}@pkgs.dev.azure.com/best-practices/recommenders/_packaging/recommenders/pypi/simple/
+# $ pip install reco-utils-pyspark --index-url=https://recommenders:{PAT}@pkgs.dev.azure.com/best-practices/recommenders/_packaging/recommenders/pypi/simple/
+
+# NOTE:
+# This yaml file is used to generate ADO artifacts for the Recommenders library.
+# We use the same yaml file to generate the library in different environments: CPU, GPU and PySpark.
+# When setting up the pipeline, we need to define the variables env_name, env_flag and library_name.
+# To add the variables, go to the pipeline, press edit and then variables.
+# An example of the variables we use in the GPU environment:
+# env_name=artifact_reco_gpu
+# env_flag=--gpu
+# library_name=reco_utils_gpu
+
+# The global variables are defined in ADO/Pipelines/Library
+# https://dev.azure.com/best-practices/recommenders/_library?itemType=VariableGroups
+variables:
+- group: LinuxAgentPool
+
+# The pipeline will be triggered when we merge to master
+pr: none
+trigger:
+- master
+
+jobs:
+- job: Artifact
+  displayName: 'Create Recommenders artifact'
+  timeoutInMinutes: 30 # how long to run the job before automatically cancelling
+  pool:
+    name: $(Agent_Pool)
+
+  steps:
+  - bash: |
+      echo "##vso[task.prependpath]/data/anaconda/bin"
+      conda env list
+    displayName: 'Add Conda to PATH'
+
+  - script: |
+      conda env remove -n $(env_name) -y
+    workingDirectory: tests
+    displayName: 'Conda remove'
+    continueOnError: true
+    condition: always() # this step will always run, even if the pipeline is canceled
+
+  - script: |
+      python ./scripts/generate_conda_file.py --name $(env_name) $(env_flag)
+      conda env create --quiet -f $(env_name).yaml 2> log
+    displayName: 'Setup Conda Env'
+
+  - script: |
+      . /anaconda/etc/profile.d/conda.sh && \
+      conda activate $(env_name) && \
+      pip install wheel twine keyring artifacts-keyring && \
+      pip list
+    displayName: 'Install dependencies'
+
+  - script: |
+      . /anaconda/etc/profile.d/conda.sh && \
+      conda activate $(env_name) && \
+      rm -rf dist && \
+      HASH=True NAME=$(library_name) python setup.py sdist bdist_wheel
+    displayName: 'Build wheel'
+
+  - task: TwineAuthenticate@1
+    inputs:
+      artifactFeed: recommenders/recommenders
+    displayName: 'Twine Authenticate'
+
+  # Artifact package: https://dev.azure.com/best-practices/recommenders/_packaging?_a=feed&feed=recommenders
+  - script: |
+      . /anaconda/etc/profile.d/conda.sh && \
+      conda activate $(env_name) && \
+      python -m twine upload -r recommenders --config-file $(PYPIRC_PATH) dist/*.whl --verbose
+    displayName: 'Upload wheel'
+
+  - script: |
+      conda env remove -n $(env_name) -y
+    workingDirectory: tests
+    displayName: 'Conda remove'
+    continueOnError: true
+    condition: always() # this step will always run, even if the pipeline is canceled
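
Note: as a reference for how the pipeline produces differently named, uniquely versioned wheels from one source tree, below is a minimal stand-alone sketch of the HASH/NAME logic added to setup.py. The literal "2019.9" is only a stand-in for the version imported from reco_utils, and the environment variable names match those exported by the "Build wheel" step; this is an illustration, not the code that ships in the patch.

# Minimal sketch of the versioning/naming behaviour in the patched setup.py.
# Assumption: "2019.9" stands in for __import__("reco_utils.__init__").VERSION.
import os
import time

version = "2019.9"  # stand-in for the version imported from reco_utils/__init__.py

# When HASH is set (the "Build wheel" step exports HASH=True), append a PEP 440
# post-release suffix derived from the current epoch time so every CI build is unique.
if os.environ.get("HASH") is not None:
    version += ".post" + str(int(time.time()))

# NAME selects which artifact is built from the same source tree,
# e.g. reco_utils (CPU), reco_utils_gpu or reco_utils_pyspark.
name = os.environ.get("NAME", "reco_utils")

print(name, version)  # e.g. "reco_utils_gpu 2019.9.post1569859200"

Running the sketch with, for example, HASH=True NAME=reco_utils_gpu in the environment shows the kind of package name and post-release version that the "Build wheel" and "Upload wheel" steps would publish to the feed.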