Generate requirements.txt from Python spec #7289

Merged
merged 15 commits into from
Feb 4, 2021
492 changes: 492 additions & 0 deletions python/gen_requirements.py

Large diffs are not rendered by default.

16 changes: 16 additions & 0 deletions python/requirements/all.txt
@@ -0,0 +1,16 @@
numpy
Contributor
I'd recommend adding comments at the top of each of these files to indicate what they are used for, since it may not always be obvious from the filename. pip requirements files support # comments: https://pip.pypa.io/en/latest/reference/pip_install/#requirements-file-format

Contributor Author
I added a way to specify a comment for each piece, and that comment is now placed at the top of each generated requirements.txt file (see the sketch of the spec format after this file).

synr>=0.2.1
psutil
coremltools
torchvision>=0.5.0
decorator
scipy
onnx>=1.7.0
future
tensorflow>=2.1.0
onnxruntime>=1.0.0
attrs
opencv-python
tflite>=2.1.0
tensorflow-estimator
torch>=1.7.0,<2.0.0
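For context on how these files are produced: the generator's spec pairs each "piece" with a (comment, dependency list) tuple, and the comment becomes the header of the generated file; the REQUIREMENTS_BY_PIECE and CONSTRAINTS names below come from the tests added later in this PR. A minimal, illustrative sketch only (not the actual contents of python/gen_requirements.py), with dependencies and descriptions copied from the generated files shown on this page:

# Illustrative sketch of the spec shape; values copied from the generated files on this page.
REQUIREMENTS_BY_PIECE = [
    ("core", ("Base requirements needed to install tvm", ["attrs", "numpy", "scipy", "synr"])),
    ("importer-onnx", ("Requirements for the ONNX importer", ["future", "onnx", "onnxruntime"])),
    ("dev", ("Requirements to develop TVM -- lint, docs, testing, etc.", ["pylint", "sphinx"])),
]

# Version constraints live separately, keyed by package name (None means unconstrained).
CONSTRAINTS = [
    ("onnx", "==1.6.0"),
    ("scipy", "~=1.4"),
    ("synr", ">=0.2.1"),
]

The validation exercised by the tests requires "core" first, "dev" last, the remaining pieces sorted alphabetically, each dependency list sorted, and no version specifiers inside the piece lists; versions belong in CONSTRAINTS.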
11 changes: 11 additions & 0 deletions python/requirements/core.txt
@@ -0,0 +1,11 @@
# AUTOGENERATED by python/gen_requirements.py
#
# Base requirements needed to install tvm
attrs
cloudpickle
decorator
numpy
psutil
scipy~=1.4
synr>=0.2.1
tornado
17 changes: 17 additions & 0 deletions python/requirements/dev.txt
@@ -0,0 +1,17 @@
# AUTOGENERATED by python/gen_requirements.py
#
# Requirements to develop TVM -- lint, docs, testing, etc.
astroid==2.3.3
autodocsumm
black==20.8b1
commonmark>=0.7.3
cpplint==1.5.4
docutils>=0.11
image
matplotlib
pillow
pylint==2.4.4
sphinx
sphinx_autodoc_annotation
sphinx_gallery
sphinx_rtd_theme
5 changes: 5 additions & 0 deletions python/requirements/importer-caffe2.txt
@@ -0,0 +1,5 @@
# AUTOGENERATED by python/gen_requirements.py
#
# Requirements for the Caffe2 importer
future
torch>=1.4.0
4 changes: 4 additions & 0 deletions python/requirements/importer-coreml.txt
@@ -0,0 +1,4 @@
# AUTOGENERATED by python/gen_requirements.py
#
# Requirements for the CoreML importer
coremltools
4 changes: 4 additions & 0 deletions python/requirements/importer-darknet.txt
@@ -0,0 +1,4 @@
# AUTOGENERATED by python/gen_requirements.py
#
# Requirements for the DarkNet importer
opencv-python
5 changes: 5 additions & 0 deletions python/requirements/importer-keras.txt
@@ -0,0 +1,5 @@
# AUTOGENERATED by python/gen_requirements.py
#
# Requirements for the Keras importer
tensorflow>=2.1.0
tensorflow-estimator
8 changes: 8 additions & 0 deletions python/requirements/importer-onnx.txt
@@ -0,0 +1,8 @@
# AUTOGENERATED by python/gen_requirements.py
#
# Requirements for the ONNX importer
future
onnx==1.6.0
onnxruntime==1.0.0
torch>=1.4.0
torchvision>=0.5.0
6 changes: 6 additions & 0 deletions python/requirements/importer-pytorch.txt
@@ -0,0 +1,6 @@
# AUTOGENERATED by python/gen_requirements.py
#
# Requirements for the PyTorch importer
future
torch>=1.4.0
torchvision>=0.5.0
5 changes: 5 additions & 0 deletions python/requirements/importer-tensorflow.txt
@@ -0,0 +1,5 @@
# AUTOGENERATED by python/gen_requirements.py
#
# Requirements for the TensorFlow importer
tensorflow>=2.1.0
tensorflow-estimator
6 changes: 6 additions & 0 deletions python/requirements/importer-tflite.txt
@@ -0,0 +1,6 @@
# AUTOGENERATED by python/gen_requirements.py
#
# Requirements for the TFLite importer
tensorflow>=2.1.0
tensorflow-estimator
tflite>=2.1.0
11 changes: 11 additions & 0 deletions python/requirements/tvmc.txt
@@ -0,0 +1,11 @@
# AUTOGENERATED by python/gen_requirements.py
#
# Requirements for the tvmc command-line tool
future
onnx==1.6.0
onnxruntime==1.0.0
tensorflow>=2.1.0
tflite>=2.1.0
torch>=1.4.0
torchvision>=0.5.0
xgboost
6 changes: 6 additions & 0 deletions python/requirements/xgboost.txt
@@ -0,0 +1,6 @@
# AUTOGENERATED by python/gen_requirements.py
#
# Requirements for XGBoost autotuning
future
torch>=1.4.0
xgboost
39 changes: 13 additions & 26 deletions python/setup.py
@@ -171,38 +171,25 @@ def get_package_data_files():
    return ["relay/std/prelude.rly", "relay/std/core.rly"]


# Temporarily add this directory to the path so we can import the requirements generator
# tool.
sys.path.insert(0, os.path.dirname(__file__))
import gen_requirements
sys.path.pop(0)

requirements = gen_requirements.join_requirements()
Contributor
Do you want to print out some details about the requirements that were identified, to help with debugging?

Contributor Author
What do you mean exactly?

Contributor
Well, it's just a suggestion: when this is invoked and the requirements are generated automatically, it might be useful to dump them to stdout (perhaps optionally) so that someone debugging issues with Python deps can readily see what they were (see the sketch after this diff). I'm not sure whether they are exposed in an obvious way elsewhere.

Contributor Author
I guess I don't typically expect to see the requirements printed when I run python setup.py. Are there cases where you do? I could add a comment above this line saying: to produce requirements.txt files, run python gen_requirements.py. That is the typical case in which I'd expect someone to be looking for them.

extras_require = {piece: deps
                  for piece, (description, deps) in requirements.items()
                  if piece not in ("all", "core")}

setup(
    name="tvm",
    version=__version__,
    description="TVM: An End to End Tensor IR/DSL Stack for Deep Learning Systems",
    zip_safe=False,
    entry_points={"console_scripts": ["tvmc = tvm.driver.tvmc.main:main"]},
    install_requires=[
        "numpy",
        "scipy",
        "decorator",
        "attrs",
        "psutil",
        "synr>=0.2.1",
    ],
    extras_require={
        "test": ["pillow<7", "matplotlib"],
        "extra_feature": [
            "tornado",
            "psutil",
            "xgboost>=1.1.0",
            "mypy",
            "orderedset",
        ],
        "tvmc": [
            "tensorflow>=2.1.0",
            "tflite>=2.1.0",
            "onnx>=1.7.0",
            "onnxruntime>=1.0.0",
            "torch>=1.4.0",
            "torchvision>=0.5.0",
        ],
    },
    install_requires=requirements["core"][1],
    extras_require=extras_require,
    packages=find_packages(),
    package_dir={"tvm": "tvm"},
    package_data={"tvm": get_package_data_files()},
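On the stdout-dumping suggestion in the thread above, here is a minimal sketch of what an opt-in dump could look like right after join_requirements() is called; the TVM_SHOW_REQUIREMENTS environment variable is hypothetical and not part of this PR:

# Hypothetical opt-in debugging aid: print the generated requirements so problems with
# Python dependencies are easier to diagnose.
if os.environ.get("TVM_SHOW_REQUIREMENTS"):
    for piece, (description, deps) in requirements.items():
        print(f"# {piece}: {description}")
        for dep in deps:
            print(f"    {dep}")

Since each non-core piece also becomes a setuptools extra, the optional dependencies should be installable in the usual way, e.g. pip install tvm[tvmc].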
184 changes: 184 additions & 0 deletions tests/python/unittest/test_gen_requirements.py
@@ -0,0 +1,184 @@
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""Tests for gen_requirements, found in python/."""

import collections
import contextlib
import os
import sys

import tvm

import pytest

# Insert the parent dir to python/tvm into the import path, so that gen_requirements may be
# imported.
sys.path.insert(0, os.path.dirname(tvm.__file__))
try:
    import gen_requirements
finally:
    sys.path.pop(0)


@contextlib.contextmanager
def patch(obj, **kw):
    """Temporarily override attributes of obj, restoring them afterwards."""
    old = {}
    for prop_name, new in kw.items():
        old[prop_name] = getattr(obj, prop_name)
        setattr(obj, prop_name, new)
    try:
        yield
    finally:
        # Restore the original attributes even if the test body raises.
        for prop_name, value in old.items():
            setattr(obj, prop_name, value)


PROBLEM_REQUIREMENTS = [
Contributor
Nit: I think this test would be easier to maintain if the expected problem string were part of this list, rather than hardcoded in the test case in the same order below.

Contributor Author
I guess, though, that you'd then be doing processing to construct the test stimulus, which you could screw up. Plus, each line here can create multiple problem strings, or contribute to composite problem strings such as the final one (a sketch of that alternative follows this list).

("extras-pre-core", ("", ["foo", 123])), # entry before core
(456, ("", ["foo", "bar"])), # invalid extras name, deps should not be processed
("core", ("", ["foo"])), # ordinary core entry.
("wrong-description-type", (None, ["foo"])), # wrong description type
("bad-value", None), # value field is not a 2-tuple
("bad-value-2", ("", ["foo"], 34)), # value field is not a 2-tuple
("invalid", ("", ["qux"])), # duplicate invalid entry, all items valid.
("extras-foo", ("", ["bar", "baz"])), # ordinary extras entry.
("invalid", ("", ["baz", None, 123])), # valid extra name, invalid deps.
("unsorted", ("", ["qux", "bar", "foo"])), # deps out of order
("versioned_dep", ("", ["baz==1.2", "foo==^2.0", "buz<3", "bar>4"])),
("duplicate_dep", ("", ["buz", "buz", "foo"])), # duplicate listed dependency
("dev", ("", ["baz", "qux"])), # ordinary dev entry.
("extras-post-dev", ("", ["bar", "buzz"])), # entry after dev
]
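For illustration, the reviewer's suggestion above might look roughly like the hypothetical sketch below; as the author notes, it does not handle entries that yield several problem strings or that feed the composite ordering message:

# Hypothetical alternative structure pairing each bad entry with its expected message;
# not what the test actually uses.
PROBLEM_REQUIREMENTS_WITH_EXPECTED = [
    (("wrong-description-type", (None, ["foo"])),
     "piece wrong-description-type: description should be a string, got None"),
    (("duplicate_dep", ("", ["buz", "buz", "foo"])),
     "piece duplicate_dep: dependency buz listed twice"),
]
EXPECTED_PROBLEMS = [expected for _entry, expected in PROBLEM_REQUIREMENTS_WITH_EXPECTED]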


def test_validate_requirements():
    with patch(gen_requirements, REQUIREMENTS_BY_PIECE=None):
        assert (gen_requirements.validate_requirements_by_piece() ==
                ["must be list or tuple, see None"])

    with patch(gen_requirements, REQUIREMENTS_BY_PIECE=PROBLEM_REQUIREMENTS):
        problems = gen_requirements.validate_requirements_by_piece()
        assert problems == [
            'piece extras-pre-core: must list after "core" (core must be first)',
            "piece extras-pre-core: deps should be a list of strings, got ['foo', 123]",
            'piece 456: must be str',
            'piece wrong-description-type: description should be a string, got None',
            ('piece bad-value: should be formatted like ("bad-value", ("<requirements.txt '
             'comment>", ["dep1", "dep2", ...])). got: None'),
            ('piece bad-value-2: should be formatted like ("bad-value-2", '
             '("<requirements.txt comment>", ["dep1", "dep2", ...])). got: (\'\', '
             '[\'foo\'], 34)'),
            'piece invalid: listed twice',
            "piece invalid: deps should be a list of strings, got ['baz', None, 123]",
            "piece unsorted: deps must be sorted. Correct order:\n ['bar', 'foo', 'qux']",
            "piece versioned_dep: deps must be sorted. Correct order:\n ['bar>4', 'baz==1.2', 'buz<3', 'foo==^2.0']",
            "piece versioned_dep: dependency baz==1.2 should not specify a version. Add it to CONSTRAINTS instead.",
            "piece versioned_dep: dependency foo==^2.0 should not specify a version. Add it to CONSTRAINTS instead.",
            "piece versioned_dep: dependency buz<3 should not specify a version. Add it to CONSTRAINTS instead.",
            "piece versioned_dep: dependency bar>4 should not specify a version. Add it to CONSTRAINTS instead.",
            "piece duplicate_dep: dependency buz listed twice",
            'piece extras-post-dev: must list before "dev" (dev must be last)',
            'pieces other than "core" and "dev" must appear in alphabetical order: '
            "['bad-value', 'bad-value-2', 'duplicate_dep', 'extras-foo', 'extras-post-dev', "
            "'extras-pre-core', 'invalid', 'invalid', 'unsorted', 'versioned_dep', "
            "'wrong-description-type']",
        ]


TEST_REQUIREMENTS_BY_PIECE = (
    ("core", ("core tvm requirements", ("bar", "foo", "non-constrained"))),
    ("extra-one", ("requirements for one feature", ("baz", "qux"))),
    ("extra-two", ("requirements for two feature", ("buz", "qux", "semver-minor", "semver-patch"))),
    ("dev", ("requirements for dev", ("buz", "oof", "rab"))),
)


def test_validate_constraints():
    with patch(gen_requirements,
               REQUIREMENTS_BY_PIECE=TEST_REQUIREMENTS_BY_PIECE,
               CONSTRAINTS=(('unlisted', '~=3'),
                            ('double-specified', '<2'),
                            ('double-specified', '==3',),
                            ('bad-constraint', '1.2.0'),
                            ('bad-semver-constraint', "i don't match the regex :P"),
                            ('alpha-semver-constraint', '^foo.bar.23'))):
        problems = gen_requirements.validate_constraints()
        assert problems == [
            "unlisted: not specified in REQUIREMENTS_BY_PIECE",
            "double-specified: not specified in REQUIREMENTS_BY_PIECE",
            "double-specified: specified twice",
            "double-specified: not specified in REQUIREMENTS_BY_PIECE",
            "bad-constraint: not specified in REQUIREMENTS_BY_PIECE",
            'bad-constraint: constraint "1.2.0" does not look like a valid constraint',
            'bad-semver-constraint: not specified in REQUIREMENTS_BY_PIECE',
            'bad-semver-constraint: constraint "i don\'t match the regex :P" does not look like a valid constraint',
            'alpha-semver-constraint: not specified in REQUIREMENTS_BY_PIECE',
            'alpha-semver-constraint: invalid semver constraint ^foo.bar.23',
            "CONSTRAINTS entries should be in this sorted order: ['alpha-semver-constraint', 'bad-constraint', 'bad-semver-constraint', 'double-specified', 'double-specified', 'unlisted']",
        ]


TEST_CONSTRAINTS = (
("bar", "==1.0"),
("baz", ">2.3"),
("buz", "^1.3.0"),
("non-constrained", None), # Support a comment.
("oof", "==0.3.4"),
("qux", "~=1.2.4"),
("semver-minor", "^0.2.2-patch2.post3+buildmeta"), # Ensure prerelease and buildmeta preserved.
("semver-patch", "^0.0.2+bm"), # Ensure postrelease preserved.
)


def test_join_requirements():
    with patch(gen_requirements, REQUIREMENTS_BY_PIECE=TEST_REQUIREMENTS_BY_PIECE,
               CONSTRAINTS=TEST_CONSTRAINTS):
        requirements = gen_requirements.join_requirements()
        assert requirements == collections.OrderedDict([
            ('core', ("core tvm requirements", ['bar==1.0', 'foo', 'non-constrained'])),
            ('extra-one', ('requirements for one feature', ['baz>2.3', 'qux~=1.2.4'])),
            ('extra-two', ('requirements for two feature',
                           ['buz>=1.3.0,<2.0.0', 'qux~=1.2.4',
                            'semver-minor>=0.2.2-patch2.post3+buildmeta,<0.3.0',
                            'semver-patch>=0.0.2+bm,<0.0.3'])),
            ('dev', ('requirements for dev', ['buz>=1.3.0,<2.0.0', 'oof==0.3.4', 'rab'])),
            ('all-prod', ('Combined dependencies for all TVM pieces, excluding dev',
                          ['bar==1.0', 'baz>2.3', 'buz>=1.3.0,<2.0.0', 'foo', 'non-constrained',
                           'qux~=1.2.4',
                           'semver-minor>=0.2.2-patch2.post3+buildmeta,<0.3.0',
                           'semver-patch>=0.0.2+bm,<0.0.3']))])
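As a worked summary, the caret constraints from TEST_CONSTRAINTS expand in the expected output above according to the usual semver rule of bumping the leftmost non-zero component:

# buz           ^1.3.0                        -> buz>=1.3.0,<2.0.0                                   (major > 0)
# semver-minor  ^0.2.2-patch2.post3+buildmeta -> semver-minor>=0.2.2-patch2.post3+buildmeta,<0.3.0   (major == 0)
# semver-patch  ^0.0.2+bm                     -> semver-patch>=0.0.2+bm,<0.0.3                       (major == minor == 0)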


def test_semver():
    problems = []

    assert gen_requirements.parse_semver("C", "^1.2.0", problems) == (["1", "2", "0"], 0, 1)
    assert problems == []

    assert gen_requirements.parse_semver("C", "^0.2.0", problems) == (["0", "2", "0"], 1, 2)
    assert problems == []

    assert gen_requirements.parse_semver("C", "^0.0.0", problems) == (["0", "0", "0"], 0, 0)
    assert problems == []

    assert gen_requirements.parse_semver("C", "^0.a.0", problems) == ([], 0, 0)
    assert problems == ["C: invalid semver constraint ^0.a.0"]




if __name__ == "__main__":
    sys.exit(pytest.main([__file__] + sys.argv[1:]))