diff --git a/.gitignore b/.gitignore
index 4a00b64..ec14d53 100644
--- a/.gitignore
+++ b/.gitignore
@@ -62,3 +62,6 @@ target/
 
 #Ipython Notebook
 .ipynb_checkpoints
+
+# Rider
+.idea
diff --git a/README.md b/README.md
index c50985a..d6a06aa 100644
--- a/README.md
+++ b/README.md
@@ -16,12 +16,28 @@ pip install kaggle
 
 ## Development
 
+### Kaggle Internal
+
+This project depends on Kaggle services. When you're extending the API and modifying
+or adding to those services, work in your Kaggle mid-tier development environment.
+Run Kaggle locally, in the container, and test the Python code by running it in the
+container so it can connect to your local testing environment.
+
+Also, run the following command to get `autogen.sh` installed:
+```bash
+# NOTE: assumes the autogen archive has already been downloaded to /tmp/autogen.zip
+rm -rf /tmp/autogen && mkdir -p /tmp/autogen && unzip -qo /tmp/autogen.zip -d /tmp/autogen &&
+mv /tmp/autogen/autogen-*/* /tmp/autogen && rm -rf /tmp/autogen/autogen-* &&
+sudo chmod a+rx /tmp/autogen/autogen.sh
+```
+
 ### Prerequisites
 
 We use [hatch](https://hatch.pypa.io) to manage this project.
 Follow these [instructions](https://hatch.pypa.io/latest/install/) to install it.
 
+If you are working in a managed environment, you may want to use `pipx`. If it isn't already installed,
+try `sudo apt install pipx`. Then you should be able to proceed with `pipx install hatch`.
+
 ### Dependencies
 
 ```sh
diff --git a/docs/KaggleApi.md b/docs/KaggleApi.md
index 4415988..36d523c 100644
--- a/docs/KaggleApi.md
+++ b/docs/KaggleApi.md
@@ -33,8 +33,11 @@ Method | HTTP request | Description
 [**kernel_push**](KaggleApi.md#kernel_push) | **POST** /kernels/push | Push a new kernel version. Can be used to create a new kernel and update an existing one.
 [**kernel_status**](KaggleApi.md#kernel_status) | **GET** /kernels/status | Get the status of the latest kernel version
 [**kernels_list**](KaggleApi.md#kernels_list) | **GET** /kernels/list | List kernels
+[**kernels_list_files**](KaggleApi.md#kernels_list_files) | **GET** /kernels/files | List kernel files
 [**metadata_get**](KaggleApi.md#metadata_get) | **GET** /datasets/metadata/{ownerSlug}/{datasetSlug} | Get the metadata for a dataset
 [**metadata_post**](KaggleApi.md#metadata_post) | **POST** /datasets/metadata/{ownerSlug}/{datasetSlug} | Update the metadata for a dataset
+[**model_instance_files**](KaggleApi.md#model_instance_files) | **GET** /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/files | List model instance files for the current version
+[**model_instance_version_files**](KaggleApi.md#model_instance_version_files) | **GET** /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/{versionNumber}/files | List model instance version files
 [**model_instance_versions_download**](KaggleApi.md#model_instance_versions_download) | **GET** /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/{versionNumber}/download | Download model instance version files
 [**models_create_instance**](KaggleApi.md#models_create_instance) | **POST** /models/{ownerSlug}/{modelSlug}/create/instance | Create a new model instance
 [**models_create_instance_version**](KaggleApi.md#models_create_instance_version) | **POST** /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/create/version | Create a new model instance version
@@ -252,7 +255,7 @@ Name | Type | Description | Notes
 
 [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
 
 # **competitions_data_list_files**
-> Result competitions_data_list_files(id) +> Result competitions_data_list_files(id, page_token=page_token, page_size=page_size) List competition data files @@ -272,10 +275,12 @@ configuration.password = 'YOUR_PASSWORD' # create an instance of the API class api_instance = kaggle.KaggleApi(kaggle.ApiClient(configuration)) id = 'id_example' # str | Competition name +page_token = 'page_token_example' # str | Page token for pagination (optional) +page_size = 20 # int | Number of items per page (default 20) (optional) (default to 20) try: # List competition data files - api_response = api_instance.competitions_data_list_files(id) + api_response = api_instance.competitions_data_list_files(id, page_token=page_token, page_size=page_size) pprint(api_response) except ApiException as e: print("Exception when calling KaggleApi->competitions_data_list_files: %s\n" % e) @@ -286,6 +291,8 @@ except ApiException as e: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **id** | **str**| Competition name | + **page_token** | **str**| Page token for pagination | [optional] + **page_size** | **int**| Number of items per page (default 20) | [optional] [default to 20] ### Return type @@ -977,7 +984,7 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) # **datasets_list_files** -> Result datasets_list_files(owner_slug, dataset_slug, dataset_version_number=dataset_version_number) +> Result datasets_list_files(owner_slug, dataset_slug, dataset_version_number=dataset_version_number, page_token=page_token, page_size=page_size) List dataset files @@ -999,10 +1006,12 @@ api_instance = kaggle.KaggleApi(kaggle.ApiClient(configuration)) owner_slug = 'owner_slug_example' # str | Dataset owner dataset_slug = 'dataset_slug_example' # str | Dataset name dataset_version_number = 'dataset_version_number_example' # str | Dataset version number (optional) +page_token = 'page_token_example' # str | Page token for pagination (optional) +page_size = 20 # int | Number of items per page (default 20) (optional) (default to 20) try: # List dataset files - api_response = api_instance.datasets_list_files(owner_slug, dataset_slug, dataset_version_number=dataset_version_number) + api_response = api_instance.datasets_list_files(owner_slug, dataset_slug, dataset_version_number=dataset_version_number, page_token=page_token, page_size=page_size) pprint(api_response) except ApiException as e: print("Exception when calling KaggleApi->datasets_list_files: %s\n" % e) @@ -1015,6 +1024,8 @@ Name | Type | Description | Notes **owner_slug** | **str**| Dataset owner | **dataset_slug** | **str**| Dataset name | **dataset_version_number** | **str**| Dataset version number | [optional] + **page_token** | **str**| Page token for pagination | [optional] + **page_size** | **int**| Number of items per page (default 20) | [optional] [default to 20] ### Return type @@ -1594,7 +1605,7 @@ configuration.password = 'YOUR_PASSWORD' # create an instance of the API class api_instance = kaggle.KaggleApi(kaggle.ApiClient(configuration)) page = 1 # int | Page number (optional) (default to 1) -page_size = 20 # int | Page size (optional) (default to 20) +page_size = 20 # int | Number of items per page (default 20) (optional) (default to 20) search = '' # str | Search terms (optional) (default to ) group = 'everyone' # str | Display only your kernels (optional) 
(default to everyone) user = 'user_example' # str | Display kernels by a particular group (optional) @@ -1619,7 +1630,7 @@ except ApiException as e: Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **page** | **int**| Page number | [optional] [default to 1] - **page_size** | **int**| Page size | [optional] [default to 20] + **page_size** | **int**| Number of items per page (default 20) | [optional] [default to 20] **search** | **str**| Search terms | [optional] [default to ] **group** | **str**| Display only your kernels | [optional] [default to everyone] **user** | **str**| Display kernels by a particular group | [optional] @@ -1646,6 +1657,65 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) +# **kernels_list_files** +> Result kernels_list_files(user_name, kernel_slug, kernel_version_number=kernel_version_number, page_size=page_size, page_token=page_token) + +List kernel files + +### Example +```python +from __future__ import print_function +import time +import kaggle +from kaggle.rest import ApiException +from pprint import pprint + +# Configure HTTP basic authorization: basicAuth +configuration = kaggle.Configuration() +configuration.username = 'YOUR_USERNAME' +configuration.password = 'YOUR_PASSWORD' + +# create an instance of the API class +api_instance = kaggle.KaggleApi(kaggle.ApiClient(configuration)) +user_name = 'user_name_example' # str | Kernel owner +kernel_slug = 'kernel_slug_example' # str | Kernel name +kernel_version_number = 'kernel_version_number_example' # str | Kernel version number (optional) +page_size = 20 # int | Number of items per page (default 20) (optional) (default to 20) +page_token = 'page_token_example' # str | Page token for pagination (optional) + +try: + # List kernel files + api_response = api_instance.kernels_list_files(user_name, kernel_slug, kernel_version_number=kernel_version_number, page_size=page_size, page_token=page_token) + pprint(api_response) +except ApiException as e: + print("Exception when calling KaggleApi->kernels_list_files: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **user_name** | **str**| Kernel owner | + **kernel_slug** | **str**| Kernel name | + **kernel_version_number** | **str**| Kernel version number | [optional] + **page_size** | **int**| Number of items per page (default 20) | [optional] [default to 20] + **page_token** | **str**| Page token for pagination | [optional] + +### Return type + +[**Result**](Result.md) + +### Authorization + +[basicAuth](../README.md#basicAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + # **metadata_get** > Result metadata_get(owner_slug, dataset_slug) @@ -1754,6 +1824,130 @@ Name | Type | Description | Notes [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) +# **model_instance_files** +> Result model_instance_files(owner_slug, model_slug, framework, instance_slug, page_size=page_size, page_token=page_token) + +List model instance 
files for the current version + +### Example +```python +from __future__ import print_function +import time +import kaggle +from kaggle.rest import ApiException +from pprint import pprint + +# Configure HTTP basic authorization: basicAuth +configuration = kaggle.Configuration() +configuration.username = 'YOUR_USERNAME' +configuration.password = 'YOUR_PASSWORD' + +# create an instance of the API class +api_instance = kaggle.KaggleApi(kaggle.ApiClient(configuration)) +owner_slug = 'owner_slug_example' # str | Model owner +model_slug = 'model_slug_example' # str | Model name +framework = 'framework_example' # str | Model instance framework +instance_slug = 'instance_slug_example' # str | Model instance slug +page_size = 20 # int | Number of items per page (default 20) (optional) (default to 20) +page_token = 'page_token_example' # str | Page token for pagination (optional) + +try: + # List model instance files for the current version + api_response = api_instance.model_instance_files(owner_slug, model_slug, framework, instance_slug, page_size=page_size, page_token=page_token) + pprint(api_response) +except ApiException as e: + print("Exception when calling KaggleApi->model_instance_files: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **owner_slug** | **str**| Model owner | + **model_slug** | **str**| Model name | + **framework** | **str**| Model instance framework | + **instance_slug** | **str**| Model instance slug | + **page_size** | **int**| Number of items per page (default 20) | [optional] [default to 20] + **page_token** | **str**| Page token for pagination | [optional] + +### Return type + +[**Result**](Result.md) + +### Authorization + +[basicAuth](../README.md#basicAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + +# **model_instance_version_files** +> Result model_instance_version_files(owner_slug, model_slug, framework, instance_slug, version_number, page_size=page_size, page_token=page_token) + +List model instance version files + +### Example +```python +from __future__ import print_function +import time +import kaggle +from kaggle.rest import ApiException +from pprint import pprint + +# Configure HTTP basic authorization: basicAuth +configuration = kaggle.Configuration() +configuration.username = 'YOUR_USERNAME' +configuration.password = 'YOUR_PASSWORD' + +# create an instance of the API class +api_instance = kaggle.KaggleApi(kaggle.ApiClient(configuration)) +owner_slug = 'owner_slug_example' # str | Model owner +model_slug = 'model_slug_example' # str | Model name +framework = 'framework_example' # str | Model instance framework +instance_slug = 'instance_slug_example' # str | Model instance slug +version_number = 'version_number_example' # str | Model instance version number +page_size = 20 # int | Number of items per page (default 20) (optional) (default to 20) +page_token = 'page_token_example' # str | Page token for pagination (optional) + +try: + # List model instance version files + api_response = api_instance.model_instance_version_files(owner_slug, model_slug, framework, instance_slug, version_number, page_size=page_size, page_token=page_token) + pprint(api_response) +except ApiException as e: + print("Exception when calling 
KaggleApi->model_instance_version_files: %s\n" % e) +``` + +### Parameters + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **owner_slug** | **str**| Model owner | + **model_slug** | **str**| Model name | + **framework** | **str**| Model instance framework | + **instance_slug** | **str**| Model instance slug | + **version_number** | **str**| Model instance version number | + **page_size** | **int**| Number of items per page (default 20) | [optional] [default to 20] + **page_token** | **str**| Page token for pagination | [optional] + +### Return type + +[**Result**](Result.md) + +### Authorization + +[basicAuth](../README.md#basicAuth) + +### HTTP request headers + + - **Content-Type**: Not defined + - **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md) + # **model_instance_versions_download** > Result model_instance_versions_download(owner_slug, model_slug, framework, instance_slug, version_number) @@ -2001,7 +2195,7 @@ api_instance = kaggle.KaggleApi(kaggle.ApiClient(configuration)) search = '' # str | Search terms (optional) (default to ) sort_by = 'hotness' # str | Sort the results (optional) (default to hotness) owner = 'owner_example' # str | Display models by a specific user or organization (optional) -page_size = 1 # int | Page size (optional) (default to 1) +page_size = 20 # int | Number of items per page (default 20) (optional) (default to 20) page_token = 'page_token_example' # str | Page token for pagination (optional) try: @@ -2019,7 +2213,7 @@ Name | Type | Description | Notes **search** | **str**| Search terms | [optional] [default to ] **sort_by** | **str**| Sort the results | [optional] [default to hotness] **owner** | **str**| Display models by a specific user or organization | [optional] - **page_size** | **int**| Page size | [optional] [default to 1] + **page_size** | **int**| Number of items per page (default 20) | [optional] [default to 20] **page_token** | **str**| Page token for pagination | [optional] ### Return type diff --git a/kaggle/api/kaggle_api.py b/kaggle/api/kaggle_api.py index 114e35b..28a0ba3 100644 --- a/kaggle/api/kaggle_api.py +++ b/kaggle/api/kaggle_api.py @@ -423,6 +423,8 @@ def competitions_data_list_files(self, id, **kwargs): # noqa: E501 :param async_req bool :param str id: Competition name (required) + :param str page_token: Page token for pagination + :param int page_size: Number of items per page (default 20) :return: Result If the method is called asynchronously, returns the request thread. @@ -444,12 +446,14 @@ def competitions_data_list_files_with_http_info(self, id, **kwargs): # noqa: E5 :param async_req bool :param str id: Competition name (required) + :param str page_token: Page token for pagination + :param int page_size: Number of items per page (default 20) :return: Result If the method is called asynchronously, returns the request thread. 
""" - all_params = ['id'] # noqa: E501 + all_params = ['id', 'page_token', 'page_size'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -476,6 +480,10 @@ def competitions_data_list_files_with_http_info(self, id, **kwargs): # noqa: E5 path_params['id'] = params['id'] # noqa: E501 query_params = [] + if 'page_token' in params: + query_params.append(('pageToken', params['page_token'])) # noqa: E501 + if 'page_size' in params: + query_params.append(('pageSize', params['page_size'])) # noqa: E501 header_params = {} @@ -1830,6 +1838,8 @@ def datasets_list_files(self, owner_slug, dataset_slug, **kwargs): # noqa: E501 :param str owner_slug: Dataset owner (required) :param str dataset_slug: Dataset name (required) :param str dataset_version_number: Dataset version number + :param str page_token: Page token for pagination + :param int page_size: Number of items per page (default 20) :return: Result If the method is called asynchronously, returns the request thread. @@ -1853,12 +1863,14 @@ def datasets_list_files_with_http_info(self, owner_slug, dataset_slug, **kwargs) :param str owner_slug: Dataset owner (required) :param str dataset_slug: Dataset name (required) :param str dataset_version_number: Dataset version number + :param str page_token: Page token for pagination + :param int page_size: Number of items per page (default 20) :return: Result If the method is called asynchronously, returns the request thread. """ - all_params = ['owner_slug', 'dataset_slug', 'dataset_version_number'] # noqa: E501 + all_params = ['owner_slug', 'dataset_slug', 'dataset_version_number', 'page_token', 'page_size'] # noqa: E501 all_params.append('async_req') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -1893,6 +1905,10 @@ def datasets_list_files_with_http_info(self, owner_slug, dataset_slug, **kwargs) query_params = [] if 'dataset_version_number' in params: query_params.append(('datasetVersionNumber', params['dataset_version_number'])) # noqa: E501 + if 'page_token' in params: + query_params.append(('pageToken', params['page_token'])) # noqa: E501 + if 'page_size' in params: + query_params.append(('pageSize', params['page_size'])) # noqa: E501 header_params = {} @@ -3011,7 +3027,7 @@ def kernels_list(self, **kwargs): # noqa: E501 :param async_req bool :param int page: Page number - :param int page_size: Page size + :param int page_size: Number of items per page (default 20) :param str search: Search terms :param str group: Display only your kernels :param str user: Display kernels by a particular group @@ -3043,7 +3059,7 @@ def kernels_list_with_http_info(self, **kwargs): # noqa: E501 :param async_req bool :param int page: Page number - :param int page_size: Page size + :param int page_size: Number of items per page (default 20) :param str search: Search terms :param str group: Display only your kernels :param str user: Display kernels by a particular group @@ -3134,6 +3150,119 @@ def kernels_list_with_http_info(self, **kwargs): # noqa: E501 _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) + def kernels_list_files(self, user_name, kernel_slug, **kwargs): # noqa: E501 + """List kernel files # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.kernels_list_files(user_name, kernel_slug, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_name: Kernel owner (required) + :param str kernel_slug: Kernel name (required) + :param str kernel_version_number: Kernel version number + :param int page_size: Number of items per page (default 20) + :param str page_token: Page token for pagination + :return: Result + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.kernels_list_files_with_http_info(user_name, kernel_slug, **kwargs) # noqa: E501 + else: + (data) = self.kernels_list_files_with_http_info(user_name, kernel_slug, **kwargs) # noqa: E501 + return data + + def kernels_list_files_with_http_info(self, user_name, kernel_slug, **kwargs): # noqa: E501 + """List kernel files # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.kernels_list_files_with_http_info(user_name, kernel_slug, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str user_name: Kernel owner (required) + :param str kernel_slug: Kernel name (required) + :param str kernel_version_number: Kernel version number + :param int page_size: Number of items per page (default 20) + :param str page_token: Page token for pagination + :return: Result + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['user_name', 'kernel_slug', 'kernel_version_number', 'page_size', 'page_token'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method kernels_list_files" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'user_name' is set + if ('user_name' not in params or + params['user_name'] is None): + raise ValueError("Missing the required parameter `user_name` when calling `kernels_list_files`") # noqa: E501 + # verify the required parameter 'kernel_slug' is set + if ('kernel_slug' not in params or + params['kernel_slug'] is None): + raise ValueError("Missing the required parameter `kernel_slug` when calling `kernels_list_files`") # noqa: E501 + + collection_formats = {} + + path_params = {} + + query_params = [] + if 'user_name' in params: + query_params.append(('userName', params['user_name'])) # noqa: E501 + if 'kernel_slug' in params: + query_params.append(('kernelSlug', params['kernel_slug'])) # noqa: E501 + if 'kernel_version_number' in params: + query_params.append(('kernelVersionNumber', params['kernel_version_number'])) # noqa: E501 + if 'page_size' in params: + query_params.append(('pageSize', params['page_size'])) # noqa: E501 + if 'page_token' in params: + query_params.append(('pageToken', params['page_token'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['basicAuth'] # noqa: E501 + + return self.api_client.call_api( + 
'/kernels/files', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='Result', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + def metadata_get(self, owner_slug, dataset_slug, **kwargs): # noqa: E501 """Get the metadata for a dataset # noqa: E501 @@ -3348,6 +3477,264 @@ def metadata_post_with_http_info(self, owner_slug, dataset_slug, settings, **kwa _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) + def model_instance_files(self, owner_slug, model_slug, framework, instance_slug, **kwargs): # noqa: E501 + """List model instance files for the current version # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.model_instance_files(owner_slug, model_slug, framework, instance_slug, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str owner_slug: Model owner (required) + :param str model_slug: Model name (required) + :param str framework: Model instance framework (required) + :param str instance_slug: Model instance slug (required) + :param int page_size: Number of items per page (default 20) + :param str page_token: Page token for pagination + :return: Result + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.model_instance_files_with_http_info(owner_slug, model_slug, framework, instance_slug, **kwargs) # noqa: E501 + else: + (data) = self.model_instance_files_with_http_info(owner_slug, model_slug, framework, instance_slug, **kwargs) # noqa: E501 + return data + + def model_instance_files_with_http_info(self, owner_slug, model_slug, framework, instance_slug, **kwargs): # noqa: E501 + """List model instance files for the current version # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.model_instance_files_with_http_info(owner_slug, model_slug, framework, instance_slug, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str owner_slug: Model owner (required) + :param str model_slug: Model name (required) + :param str framework: Model instance framework (required) + :param str instance_slug: Model instance slug (required) + :param int page_size: Number of items per page (default 20) + :param str page_token: Page token for pagination + :return: Result + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['owner_slug', 'model_slug', 'framework', 'instance_slug', 'page_size', 'page_token'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method model_instance_files" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'owner_slug' is set + if ('owner_slug' not in params or + params['owner_slug'] is None): + raise ValueError("Missing the required parameter `owner_slug` when calling `model_instance_files`") # noqa: E501 + # verify the required parameter 'model_slug' is set + if ('model_slug' not in params or + params['model_slug'] is None): + raise ValueError("Missing the required parameter `model_slug` when calling `model_instance_files`") # noqa: E501 + # verify the required parameter 'framework' is set + if ('framework' not in params or + params['framework'] is None): + raise ValueError("Missing the required parameter `framework` when calling `model_instance_files`") # noqa: E501 + # verify the required parameter 'instance_slug' is set + if ('instance_slug' not in params or + params['instance_slug'] is None): + raise ValueError("Missing the required parameter `instance_slug` when calling `model_instance_files`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'owner_slug' in params: + path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 + if 'model_slug' in params: + path_params['modelSlug'] = params['model_slug'] # noqa: E501 + if 'framework' in params: + path_params['framework'] = params['framework'] # noqa: E501 + if 'instance_slug' in params: + path_params['instanceSlug'] = params['instance_slug'] # noqa: E501 + + query_params = [] + if 'page_size' in params: + query_params.append(('pageSize', params['page_size'])) # noqa: E501 + if 'page_token' in params: + query_params.append(('pageToken', params['page_token'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['basicAuth'] # noqa: E501 + + return self.api_client.call_api( + '/models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/files', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='Result', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def model_instance_version_files(self, owner_slug, model_slug, framework, instance_slug, version_number, **kwargs): # noqa: E501 + """List model instance version files # noqa: E501 + + This method makes a synchronous HTTP request by default. 
To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.model_instance_version_files(owner_slug, model_slug, framework, instance_slug, version_number, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str owner_slug: Model owner (required) + :param str model_slug: Model name (required) + :param str framework: Model instance framework (required) + :param str instance_slug: Model instance slug (required) + :param str version_number: Model instance version number (required) + :param int page_size: Number of items per page (default 20) + :param str page_token: Page token for pagination + :return: Result + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async_req'): + return self.model_instance_version_files_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, **kwargs) # noqa: E501 + else: + (data) = self.model_instance_version_files_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, **kwargs) # noqa: E501 + return data + + def model_instance_version_files_with_http_info(self, owner_slug, model_slug, framework, instance_slug, version_number, **kwargs): # noqa: E501 + """List model instance version files # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async_req=True + >>> thread = api.model_instance_version_files_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, async_req=True) + >>> result = thread.get() + + :param async_req bool + :param str owner_slug: Model owner (required) + :param str model_slug: Model name (required) + :param str framework: Model instance framework (required) + :param str instance_slug: Model instance slug (required) + :param str version_number: Model instance version number (required) + :param int page_size: Number of items per page (default 20) + :param str page_token: Page token for pagination + :return: Result + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['owner_slug', 'model_slug', 'framework', 'instance_slug', 'version_number', 'page_size', 'page_token'] # noqa: E501 + all_params.append('async_req') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method model_instance_version_files" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'owner_slug' is set + if ('owner_slug' not in params or + params['owner_slug'] is None): + raise ValueError("Missing the required parameter `owner_slug` when calling `model_instance_version_files`") # noqa: E501 + # verify the required parameter 'model_slug' is set + if ('model_slug' not in params or + params['model_slug'] is None): + raise ValueError("Missing the required parameter `model_slug` when calling `model_instance_version_files`") # noqa: E501 + # verify the required parameter 'framework' is set + if ('framework' not in params or + params['framework'] is None): + raise ValueError("Missing the required parameter `framework` when calling `model_instance_version_files`") # noqa: E501 + # verify the required parameter 'instance_slug' is set + if ('instance_slug' not in params or + params['instance_slug'] is None): + raise ValueError("Missing the required parameter `instance_slug` when calling `model_instance_version_files`") # noqa: E501 + # verify the required parameter 'version_number' is set + if ('version_number' not in params or + params['version_number'] is None): + raise ValueError("Missing the required parameter `version_number` when calling `model_instance_version_files`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'owner_slug' in params: + path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 + if 'model_slug' in params: + path_params['modelSlug'] = params['model_slug'] # noqa: E501 + if 'framework' in params: + path_params['framework'] = params['framework'] # noqa: E501 + if 'instance_slug' in params: + path_params['instanceSlug'] = params['instance_slug'] # noqa: E501 + if 'version_number' in params: + path_params['versionNumber'] = params['version_number'] # noqa: E501 + + query_params = [] + if 'page_size' in params: + query_params.append(('pageSize', params['page_size'])) # noqa: E501 + if 'page_token' in params: + query_params.append(('pageToken', params['page_token'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = ['basicAuth'] # noqa: E501 + + return self.api_client.call_api( + '/models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/{versionNumber}/files', 'GET', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='Result', # noqa: E501 + auth_settings=auth_settings, + async_req=params.get('async_req'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + def model_instance_versions_download(self, owner_slug, model_slug, framework, instance_slug, version_number, **kwargs): # noqa: 
E501 """Download model instance version files # noqa: E501 @@ -3824,7 +4211,7 @@ def models_list(self, **kwargs): # noqa: E501 :param str search: Search terms :param str sort_by: Sort the results :param str owner: Display models by a specific user or organization - :param int page_size: Page size + :param int page_size: Number of items per page (default 20) :param str page_token: Page token for pagination :return: Result If the method is called asynchronously, @@ -3849,7 +4236,7 @@ def models_list_with_http_info(self, **kwargs): # noqa: E501 :param str search: Search terms :param str sort_by: Sort the results :param str owner: Display models by a specific user or organization - :param int page_size: Page size + :param int page_size: Number of items per page (default 20) :param str page_token: Page token for pagination :return: Result If the method is called asynchronously, diff --git a/kaggle/api/kaggle_api_extended.py b/kaggle/api/kaggle_api_extended.py index 886ca66..47f8211 100644 --- a/kaggle/api/kaggle_api_extended.py +++ b/kaggle/api/kaggle_api_extended.py @@ -99,7 +99,6 @@ class DirectoryArchive(object): - def __init__(self, fullpath, format): self._fullpath = fullpath self._format = format @@ -109,8 +108,9 @@ def __init__(self, fullpath, format): def __enter__(self): self._temp_dir = tempfile.mkdtemp() _, dir_name = os.path.split(self._fullpath) - self.path = shutil.make_archive(os.path.join(self._temp_dir, dir_name), - self._format, self._fullpath) + self.path = shutil.make_archive( + os.path.join(self._temp_dir, dir_name), self._format, + self._fullpath) _, self.name = os.path.split(self.path) return self @@ -119,7 +119,6 @@ def __exit__(self, *args): class ResumableUploadContext(object): - def __init__(self, no_resume=False): self.no_resume = no_resume self._temp_dir = os.path.join(tempfile.gettempdir(), '.kaggle/uploads') @@ -365,7 +364,6 @@ def with_retry(self, initial_delay_millis=500, retry_multiplier=1.7, randomness_factor=0.5): - def retriable_func(*args): for i in range(1, max_retries + 1): try: @@ -375,9 +373,8 @@ def retriable_func(*args): total_delay = self._calculate_backoff_delay( i, initial_delay_millis, retry_multiplier, randomness_factor) - print( - 'Request failed: %s. Will retry in %2.1f seconds' % - (e, total_delay)) + print('Request failed: %s. Will retry in %2.1f seconds' + % (e, total_delay)) time.sleep(total_delay) continue raise @@ -405,8 +402,8 @@ def authenticate(self): or self.CONFIG_NAME_KEY not in config_data: if os.path.exists(self.config): config_data = self.read_config_file(config_data) - elif self._is_help_or_version_command(api_command) or (len( - sys.argv) > 2 and api_command.startswith( + elif self._is_help_or_version_command(api_command) or ( + len(sys.argv) > 2 and api_command.startswith( self.command_prefixes_allowing_anonymous_access)): # Some API commands should be allowed without authentication. return @@ -484,8 +481,8 @@ def _load_config(self, config_data): # Cert File if self.CONFIG_NAME_SSL_CA_CERT in config_data: - configuration.ssl_ca_cert = config_data[ - self.CONFIG_NAME_SSL_CA_CERT] + configuration.ssl_ca_cert = config_data[self. + CONFIG_NAME_SSL_CA_CERT] # Keep config values with class instance, and load api client! 
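
Reviewer note: a minimal sketch of how the `pageToken`/`pageSize` parameters added to the generated client above could be exercised end to end. It follows the style of the examples in `docs/KaggleApi.md`; `'titanic'` is a placeholder competition name, and the assumption that the returned `Result` carries `nextPageToken` (empty on the last page) mirrors how the extended wrappers in this diff consume it.

```python
from __future__ import print_function
import kaggle
from kaggle.rest import ApiException
from pprint import pprint

# Basic-auth setup, as in the generated examples in docs/KaggleApi.md.
configuration = kaggle.Configuration()
configuration.username = 'YOUR_USERNAME'
configuration.password = 'YOUR_PASSWORD'
api = kaggle.KaggleApi(kaggle.ApiClient(configuration))

# Fetch consecutive pages of competition data files. The kwargs are built
# conditionally because the generated methods only send pageToken/pageSize
# when the corresponding kwarg is present (the `if 'page_token' in params`
# checks above). 'titanic' is a placeholder competition name.
page_token = None
while True:
    kwargs = {'page_size': 20}
    if page_token:
        kwargs['page_token'] = page_token
    try:
        result = api.competitions_data_list_files('titanic', **kwargs)
    except ApiException as e:
        print('Exception when calling competitions_data_list_files: %s' % e)
        break
    pprint(result)
    # nextPageToken is assumed to be empty/absent on the last page.
    page_token = getattr(result, 'nextPageToken', None)
    if not page_token:
        break
```
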
@@ -706,11 +703,12 @@ def competitions_list(self, str(self.valid_competition_sort_by)) competitions_list_result = self.process_response( - self.competitions_list_with_http_info(group=group or '', - category=category or '', - sort_by=sort_by or '', - page=page, - search=search or '')) + self.competitions_list_with_http_info( + group=group or '', + category=category or '', + sort_by=sort_by or '', + page=page, + search=search or '')) return [Competition(c) for c in competitions_list_result] def competitions_list_cli(self, @@ -731,11 +729,12 @@ def competitions_list_cli(self, search: a search term to use (default is empty string) csv_display: if True, print comma separated values """ - competitions = self.competitions_list(group=group, - category=category, - sort_by=sort_by, - page=page, - search=search) + competitions = self.competitions_list( + group=group, + category=category, + sort_by=sort_by, + page=page, + search=search) fields = [ 'ref', 'deadline', 'category', 'reward', 'teamCount', 'userHasEntered' @@ -788,9 +787,8 @@ def competition_submit(self, file_name, message, competition, quiet=False): upload_result_token = upload_result['token'] else: # New submissions path! - upload_status = self.upload_complete(file_name, - url_result['createUrl'], - quiet) + upload_status = self.upload_complete( + file_name, url_result['createUrl'], quiet) if upload_status != ResumableUploadResult.COMPLETE: # Actual error is printed during upload_complete. Not # ideal but changing would not be backwards compatible @@ -831,12 +829,17 @@ def competition_submit_cli(self, raise e return submit_result - def competition_submissions(self, competition): + def competition_submissions(self, + competition, + page_token=None, + page_size=20): """ get the list of Submission for a particular competition Parameters ========== competition: the name of the competition + page_token: token for pagination + page_size: the number of items per page """ submissions_result = self.process_response( self.competitions_submissions_list_with_http_info(id=competition)) @@ -846,6 +849,8 @@ def competition_submissions_cli(self, competition=None, competition_opt=None, csv_display=False, + page_token=None, + page_size=20, quiet=False): """ wrapper to competition_submission, will return either json or csv to the user. Additional parameters are listed below, see @@ -856,6 +861,8 @@ def competition_submissions_cli(self, competition: the name of the competition. 
If None, look to config competition_opt: an alternative competition option provided by cli csv_display: if True, print comma separated values + page_token: token for pagination + page_size: the number of items per page quiet: suppress verbose output (default is False) """ competition = competition or competition_opt @@ -867,7 +874,8 @@ def competition_submissions_cli(self, if competition is None: raise ValueError('No competition specified') else: - submissions = self.competition_submissions(competition) + submissions = self.competition_submissions(competition, page_token, + page_size) fields = [ 'fileName', 'date', 'description', 'status', 'publicScore', 'privateScore' @@ -880,20 +888,28 @@ def competition_submissions_cli(self, else: print('No submissions found') - def competition_list_files(self, competition): + def competition_list_files(self, + competition, + page_token=None, + page_size=20): """ list files for competition Parameters ========== competition: the name of the competition + page_token: the page token for pagination + page_size: the number of items per page """ competition_list_files_result = self.process_response( - self.competitions_data_list_files_with_http_info(id=competition)) - return [File(f) for f in competition_list_files_result] + self.competitions_data_list_files_with_http_info( + id=competition, page_token=page_token, page_size=page_size)) + return FileList(competition_list_files_result) def competition_list_files_cli(self, competition, competition_opt=None, csv_display=False, + page_token=None, + page_size=20, quiet=False): """ List files for a competition, if it exists @@ -902,6 +918,8 @@ def competition_list_files_cli(self, competition: the name of the competition. If None, look to config competition_opt: an alternative competition option provided by cli csv_display: if True, print comma separated values + page_token: the page token for pagination + page_size: the number of items per page quiet: suppress verbose output (default is False) """ competition = competition or competition_opt @@ -913,13 +931,17 @@ def competition_list_files_cli(self, if competition is None: raise ValueError('No competition specified') else: - files = self.competition_list_files(competition) + result = self.competition_list_files(competition, page_token, + page_size) + next_page_token = result.nextPageToken + if next_page_token: + print('Next Page Token = {}'.format(next_page_token)) fields = ['name', 'size', 'creationDate'] - if files: + if result: if csv_display: - self.print_csv(files, fields) + self.print_csv(result.files, fields) else: - self.print_table(files, fields) + self.print_table(result.files, fields) else: print('No files found') @@ -1141,9 +1163,8 @@ def dataset_list(self, ) if file_type and file_type not in self.valid_dataset_file_types: - raise ValueError( - 'Invalid file type specified. Valid options are ' + - str(self.valid_dataset_file_types)) + raise ValueError('Invalid file type specified. Valid options are ' + + str(self.valid_dataset_file_types)) if license_name and license_name not in self.valid_dataset_license_names: raise ValueError('Invalid license specified. 
Valid options are ' + @@ -1169,17 +1190,18 @@ def dataset_list(self, group = 'user' datasets_list_result = self.process_response( - self.datasets_list_with_http_info(group=group, - sort_by=sort_by or 'hottest', - size=size, - filetype=file_type or 'all', - license=license_name or 'all', - tagids=tag_ids or '', - search=search or '', - user=user or '', - page=page, - max_size=max_size, - min_size=min_size)) + self.datasets_list_with_http_info( + group=group, + sort_by=sort_by or 'hottest', + size=size, + filetype=file_type or 'all', + license=license_name or 'all', + tagids=tag_ids or '', + search=search or '', + user=user or '', + page=page, + max_size=max_size, + min_size=min_size)) return [Dataset(d) for d in datasets_list_result] def dataset_list_cli(self, @@ -1305,12 +1327,14 @@ def dataset_metadata_cli(self, dataset, path, update, dataset_opt=None): meta_file = self.dataset_metadata(dataset, path) print('Downloaded metadata to ' + meta_file) - def dataset_list_files(self, dataset): + def dataset_list_files(self, dataset, page_token=None, page_size=20): """ list files for a dataset Parameters ========== dataset: the string identified of the dataset should be in format [owner]/[dataset-name] + page_token: the page token for pagination + page_size: the number of items per page """ if dataset is None: raise ValueError('A dataset must be specified') @@ -1321,13 +1345,17 @@ def dataset_list_files(self, dataset): self.datasets_list_files_with_http_info( owner_slug=owner_slug, dataset_slug=dataset_slug, - dataset_version_number=dataset_version_number)) + dataset_version_number=dataset_version_number, + page_token=page_token, + page_size=page_size)) return ListFilesResult(dataset_list_files_result) def dataset_list_files_cli(self, dataset, dataset_opt=None, - csv_display=False): + csv_display=False, + page_token=None, + page_size=20): """ a wrapper to dataset_list_files for the client (list files for a dataset) Parameters @@ -1336,13 +1364,19 @@ def dataset_list_files_cli(self, should be in format [owner]/[dataset-name] dataset_opt: an alternative option to providing a dataset csv_display: if True, print comma separated values instead of table + page_token: the page token for pagination + page_size: the number of items per page """ dataset = dataset or dataset_opt - result = self.dataset_list_files(dataset) + result = self.dataset_list_files(dataset, page_token, page_size) + if result: if result.error_message: print(result.error_message) else: + next_page_token = result.nextPageToken + if next_page_token: + print('Next Page Token = {}'.format(next_page_token)) fields = ['name', 'size', 'creationDate'] if csv_display: self.print_csv(result.files, fields) @@ -1369,8 +1403,8 @@ def dataset_status(self, dataset): owner_slug = self.get_config_value(self.CONFIG_NAME_USER) dataset_slug = dataset dataset_status_result = self.process_response( - self.datasets_status_with_http_info(owner_slug=owner_slug, - dataset_slug=dataset_slug)) + self.datasets_status_with_http_info( + owner_slug=owner_slug, dataset_slug=dataset_slug)) return dataset_status_result def dataset_status_cli(self, dataset, dataset_opt=None): @@ -1552,19 +1586,21 @@ def dataset_download_cli(self, ] if file_name is None: - self.dataset_download_files(dataset, - path=path, - unzip=unzip, - force=force, - quiet=quiet, - licenses=licenses) + self.dataset_download_files( + dataset, + path=path, + unzip=unzip, + force=force, + quiet=quiet, + licenses=licenses) else: - self.dataset_download_file(dataset, - file_name, - path=path, - force=force, 
- quiet=quiet, - licenses=licenses) + self.dataset_download_file( + dataset, + file_name, + path=path, + force=force, + quiet=quiet, + licenses=licenses) def _upload_blob(self, path, quiet, blob_type, upload_context): """ upload a file @@ -1725,9 +1761,8 @@ def dataset_create_version_cli(self, ('The following are not valid tags and could not be added to ' 'the dataset: ') + str(result.invalidTags)) elif result.status.lower() == 'ok': - print( - 'Dataset version is being created. Please check progress at ' + - result.url) + print('Dataset version is being created. Please check progress at ' + + result.url) else: print('Dataset version creation error: ' + result.error) @@ -1794,8 +1829,8 @@ def dataset_create_new(self, dataset_slug = ref_list[1] # validations - if ref == self.config_values[ - self.CONFIG_NAME_USER] + '/INSERT_SLUG_HERE': + if ref == self.config_values[self. + CONFIG_NAME_USER] + '/INSERT_SLUG_HERE': raise ValueError( 'Default slug detected, please change values before uploading') if title == 'INSERT_TITLE_HERE': @@ -1823,16 +1858,17 @@ def dataset_create_new(self, raise ValueError( 'Subtitle length must be between 20 and 80 characters') - request = DatasetNewRequest(title=title, - slug=dataset_slug, - owner_slug=owner_slug, - license_name=license_name, - subtitle=subtitle, - description=description, - files=[], - is_private=not public, - convert_to_csv=convert_to_csv, - category_ids=keywords) + request = DatasetNewRequest( + title=title, + slug=dataset_slug, + owner_slug=owner_slug, + license_name=license_name, + subtitle=subtitle, + description=description, + files=[], + is_private=not public, + convert_to_csv=convert_to_csv, + category_ids=keywords) with ResumableUploadContext() as upload_context: self.upload_files(request, resources, folder, ApiBlobType.DATASET, @@ -1927,28 +1963,30 @@ def download_file(self, headers={'Range': 'bytes=%d-' % (size_read, )}, _preload_content=False) - with tqdm(total=size, - initial=size_read, - unit='B', - unit_scale=True, - unit_divisor=1024, - disable=quiet) as pbar: + with tqdm( + total=size, + initial=size_read, + unit='B', + unit_scale=True, + unit_divisor=1024, + disable=quiet) as pbar: with open(outfile, open_mode) as out: while True: data = response.read(chunk_size) if not data: break out.write(data) - os.utime(outfile, - times=(remote_date_timestamp - 1, - remote_date_timestamp - 1)) + os.utime( + outfile, + times=(remote_date_timestamp - 1, + remote_date_timestamp - 1)) size_read = min(size, size_read + chunk_size) pbar.update(len(data)) if not quiet: print('\n', end='') - os.utime(outfile, - times=(remote_date_timestamp, remote_date_timestamp)) + os.utime( + outfile, times=(remote_date_timestamp, remote_date_timestamp)) def kernels_list(self, page=1, @@ -2019,18 +2057,19 @@ def kernels_list(self, group = 'profile' kernels_list_result = self.process_response( - self.kernels_list_with_http_info(page=page, - page_size=page_size, - group=group, - user=user or '', - language=language or 'all', - kernel_type=kernel_type or 'all', - output_type=output_type or 'all', - sort_by=sort_by or 'hotness', - dataset=dataset or '', - competition=competition or '', - parent_kernel=parent_kernel or '', - search=search or '')) + self.kernels_list_with_http_info( + page=page, + page_size=page_size, + group=group, + user=user or '', + language=language or 'all', + kernel_type=kernel_type or 'all', + output_type=output_type or 'all', + sort_by=sort_by or 'hotness', + dataset=dataset or '', + competition=competition or '', + parent_kernel=parent_kernel 
or '', + search=search or '')) return [Kernel(k) for k in kernels_list_result] def kernels_list_cli(self, @@ -2053,18 +2092,19 @@ def kernels_list_cli(self, ========== csv_display: if True, print comma separated values instead of table """ - kernels = self.kernels_list(page=page, - page_size=page_size, - search=search, - mine=mine, - dataset=dataset, - competition=competition, - parent_kernel=parent, - user=user, - language=language, - kernel_type=kernel_type, - output_type=output_type, - sort_by=sort_by) + kernels = self.kernels_list( + page=page, + page_size=page_size, + search=search, + mine=mine, + dataset=dataset, + competition=competition, + parent_kernel=parent, + user=user, + language=language, + kernel_type=kernel_type, + output_type=output_type, + sort_by=sort_by) fields = ['ref', 'title', 'author', 'lastRunTime', 'totalVotes'] if kernels: if csv_display: @@ -2074,6 +2114,65 @@ def kernels_list_cli(self, else: print('Not found') + def kernels_list_files(self, kernel, page_token=None, page_size=20): + """ list files for a kernel + Parameters + ========== + kernel: the string identifier of the kernel + should be in format [owner]/[kernel-name] + page_token: the page token for pagination + page_size: the number of items per page + """ + if kernel is None: + raise ValueError('A kernel must be specified') + user_name, kernel_slug, kernel_version_number = self.split_dataset_string( + kernel) + + kernels_list_files_result = self.process_response( + self.kernels_list_files_with_http_info( + kernel_slug=kernel_slug, + user_name=user_name, + page_token=page_token, + page_size=page_size)) + return FileList(kernels_list_files_result) + + def kernels_list_files_cli(self, + kernel, + kernel_opt=None, + csv_display=False, + page_token=None, + page_size=20): + """ a wrapper to kernel_list_files for the client + (list files for a kernel) + Parameters + ========== + kernel: the string identifier of the kernel + should be in format [owner]/[kernel-name] + kernel_opt: an alternative option to providing a kernel + csv_display: if True, print comma separated values instead of table + page_token: the page token for pagination + page_size: the number of items per page + """ + kernel = kernel or kernel_opt + result = self.kernels_list_files(kernel, page_token, page_size) + + if result is None: + print('No files found') + return + + if result.error_message: + print(result.error_message) + return + + next_page_token = result.nextPageToken + if next_page_token: + print('Next Page Token = {}'.format(next_page_token)) + fields = ['name', 'size', 'creationDate'] + if csv_display: + self.print_csv(result.files, fields) + else: + self.print_table(result.files, fields) + def kernels_initialize(self, folder): """ create a new kernel in a specified folder from template, including json metadata that grabs values from the configuration. 
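
Reviewer note: to show how the new `kernels_list_files` wrapper above is meant to be consumed outside the CLI, here is a small sketch that pages through a kernel's files by chaining `nextPageToken`, the same way `kernels_list_files_cli` handles one page at a time. The kernel ref is a placeholder, the `FileList` attributes (`files`, `nextPageToken`, `error_message`) and file fields (`name`, `size`, `creationDate`) are the ones referenced by the wrapper code above, and `authenticate()` is assumed to find credentials in the usual `~/.kaggle/kaggle.json`.

```python
from kaggle.api.kaggle_api_extended import KaggleApi

api = KaggleApi()
api.authenticate()  # picks up credentials, e.g. from ~/.kaggle/kaggle.json

# 'someuser/some-kernel' is a placeholder [owner]/[kernel-name] ref.
page_token = None
while True:
    result = api.kernels_list_files('someuser/some-kernel',
                                    page_token=page_token,
                                    page_size=20)
    if result.error_message:
        print(result.error_message)
        break
    for f in result.files:
        # Same fields the CLI wrapper prints.
        print(f.name, f.size, f.creationDate)
    page_token = result.nextPageToken
    if not page_token:
        break
```
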
@@ -2097,11 +2196,11 @@ def kernels_initialize(self, folder): 'code_file': 'INSERT_CODE_FILE_PATH_HERE', 'language': - 'Pick one of: {' + - ','.join(x for x in self.valid_push_language_types) + '}', + 'Pick one of: {' + ','.join( + x for x in self.valid_push_language_types) + '}', 'kernel_type': - 'Pick one of: {' + - ','.join(x for x in self.valid_push_kernel_types) + '}', + 'Pick one of: {' + ','.join( + x for x in self.valid_push_kernel_types) + '}', 'is_private': 'true', 'enable_gpu': @@ -2211,9 +2310,8 @@ def kernels_push(self, folder): for source in model_sources: self.validate_model_string(source) - docker_pinning_type = self.get_or_default(meta_data, - 'docker_image_pinning_type', - None) + docker_pinning_type = self.get_or_default( + meta_data, 'docker_image_pinning_type', None) if (docker_pinning_type is not None and docker_pinning_type not in self.valid_push_pinning_types): raise ValueError( @@ -2387,8 +2485,8 @@ def kernels_pull(self, kernel, path, metadata=False, quiet=True): if file_name is None: print( 'Unknown language %s + kernel type %s - please report this ' - 'on the kaggle-api github issues' % - (language, kernel_type)) + 'on the kaggle-api github issues' % (language, + kernel_type)) print( 'Saving as a python file, even though this may not be the ' 'correct language') @@ -2444,10 +2542,8 @@ def kernels_pull_cli(self, """ client wrapper for kernels_pull """ kernel = kernel or kernel_opt - effective_path = self.kernels_pull(kernel, - path=path, - metadata=metadata, - quiet=False) + effective_path = self.kernels_pull( + kernel, path=path, metadata=metadata, quiet=False) if metadata: print('Source code and metadata downloaded to ' + effective_path) else: @@ -2629,14 +2725,15 @@ def model_list(self, raise ValueError('Page size must be >= 1') models_list_result = self.process_response( - self.models_list_with_http_info(sort_by=sort_by or 'hotness', - search=search or '', - owner=owner or '', - page_size=page_size, - page_token=page_token)) + self.models_list_with_http_info( + sort_by=sort_by or 'hotness', + search=search or '', + owner=owner or '', + page_size=page_size, + page_token=page_token)) next_page_token = models_list_result['nextPageToken'] - if next_page_token != '': + if next_page_token: print('Next Page Token = {}'.format(next_page_token)) return [Model(m) for m in models_list_result['models']] @@ -2680,12 +2777,18 @@ def model_initialize(self, folder): raise ValueError('Invalid folder: ' + folder) meta_data = { - 'ownerSlug': 'INSERT_OWNER_SLUG_HERE', - 'title': 'INSERT_TITLE_HERE', - 'slug': 'INSERT_SLUG_HERE', - 'subtitle': '', - 'isPrivate': True, - 'description': '''# Model Summary + 'ownerSlug': + 'INSERT_OWNER_SLUG_HERE', + 'title': + 'INSERT_TITLE_HERE', + 'slug': + 'INSERT_SLUG_HERE', + 'subtitle': + '', + 'isPrivate': + True, + 'description': + '''# Model Summary # Model Characteristics @@ -2693,8 +2796,10 @@ def model_initialize(self, folder): # Evaluation Results ''', - 'publishTime': '', - 'provenanceSources': '' + 'publishTime': + '', + 'provenanceSources': + '' } meta_file = os.path.join(folder, self.MODEL_METADATA_FILE) with open(meta_file, 'w') as f: @@ -2748,14 +2853,15 @@ def model_create_new(self, folder): if publish_time: self.validate_date(publish_time) - request = ModelNewRequest(owner_slug=owner_slug, - slug=slug, - title=title, - subtitle=subtitle, - is_private=is_private, - description=description, - publish_time=publish_time, - provenance_sources=provenance_sources) + request = ModelNewRequest( + owner_slug=owner_slug, + slug=slug, + 
title=title, + subtitle=subtitle, + is_private=is_private, + description=description, + publish_time=publish_time, + provenance_sources=provenance_sources) result = ModelNewResponse( self.process_response( self.models_create_new_with_http_info(request))) @@ -2867,13 +2973,14 @@ def model_update(self, folder): if provenance_sources != None: update_mask['paths'].append('provenance_sources') - request = ModelUpdateRequest(title=title, - subtitle=subtitle, - is_private=is_private, - description=description, - publish_time=publish_time, - provenance_sources=provenance_sources, - update_mask=update_mask) + request = ModelUpdateRequest( + title=title, + subtitle=subtitle, + is_private=is_private, + description=description, + publish_time=publish_time, + provenance_sources=provenance_sources, + update_mask=update_mask) result = ModelNewResponse( self.process_response( self.update_model_with_http_info(owner_slug, slug, request))) @@ -2965,12 +3072,18 @@ def model_instance_initialize(self, folder): raise ValueError('Invalid folder: ' + folder) meta_data = { - 'ownerSlug': 'INSERT_OWNER_SLUG_HERE', - 'modelSlug': 'INSERT_EXISTING_MODEL_SLUG_HERE', - 'instanceSlug': 'INSERT_INSTANCE_SLUG_HERE', - 'framework': 'INSERT_FRAMEWORK_HERE', - 'overview': '', - 'usage': '''# Model Format + 'ownerSlug': + 'INSERT_OWNER_SLUG_HERE', + 'modelSlug': + 'INSERT_EXISTING_MODEL_SLUG_HERE', + 'instanceSlug': + 'INSERT_INSTANCE_SLUG_HERE', + 'framework': + 'INSERT_FRAMEWORK_HERE', + 'overview': + '', + 'usage': + '''# Model Format # Training Data @@ -2984,12 +3097,17 @@ def model_instance_initialize(self, folder): # Changelog ''', - 'licenseName': 'Apache 2.0', - 'fineTunable': False, + 'licenseName': + 'Apache 2.0', + 'fineTunable': + False, 'trainingData': [], - 'modelInstanceType': 'Unspecified', - 'baseModelInstanceId': 0, - 'externalBaseModelUrl': '' + 'modelInstanceType': + 'Unspecified', + 'baseModelInstanceId': + 0, + 'externalBaseModelUrl': + '' } meta_file = os.path.join(folder, self.MODEL_INSTANCE_METADATA_FILE) with open(meta_file, 'w') as f: @@ -3029,14 +3147,12 @@ def model_instance_create(self, folder, quiet=False, dir_mode='skip'): license_name = self.get_or_fail(meta_data, 'licenseName') fine_tunable = self.get_or_default(meta_data, 'fineTunable', False) training_data = self.get_or_default(meta_data, 'trainingData', []) - model_instance_type = self.get_or_default(meta_data, - 'modelInstanceType', - 'Unspecified') + model_instance_type = self.get_or_default( + meta_data, 'modelInstanceType', 'Unspecified') base_model_instance = self.get_or_default(meta_data, 'baseModelInstance', '') - external_base_model_url = self.get_or_default(meta_data, - 'externalBaseModelUrl', - '') + external_base_model_url = self.get_or_default( + meta_data, 'externalBaseModelUrl', '') # validations if owner_slug == 'INSERT_OWNER_SLUG_HERE': @@ -3081,9 +3197,8 @@ def model_instance_create(self, folder, quiet=False, dir_mode='skip'): result = ModelNewResponse( self.process_response( self.with_retry( - self.models_create_instance_with_http_info)(owner_slug, - model_slug, - request))) + self.models_create_instance_with_http_info)( + owner_slug, model_slug, request))) return result @@ -3143,6 +3258,73 @@ def model_instance_delete_cli(self, model_instance, yes): else: print('The model instance was deleted.') + def model_instance_files(self, + model_instance, + page_token=None, + page_size=20, + csv_display=False): + """ list all files for the current version of a model instance + + Parameters + ========== + model_instance: the string 
identifier of the model instance + should be in format [owner]/[model-name]/[framework]/[instance-slug] + page_token: token for pagination + page_size: the number of items per page + csv_display: if True, print comma separated values instead of table + """ + if model_instance is None: + raise ValueError('A model_instance must be specified') + + self.validate_model_instance_string(model_instance) + urls = model_instance.split('/') + [owner_slug, model_slug, framework, instance_slug] = urls + + response = self.process_response( + self.model_instance_files_with_http_info( + owner_slug=owner_slug, + model_slug=model_slug, + framework=framework, + instance_slug=instance_slug, + page_size=page_size, + page_token=page_token, + _preload_content=True)) + + if response: + next_page_token = response['nextPageToken'] + if next_page_token: + print('Next Page Token = {}'.format(next_page_token)) + return FileList(response) + else: + print('No files found') + + def model_instance_files_cli(self, + model_instance, + page_token=None, + page_size=20, + csv_display=False): + """ client wrapper for model_instance_files. + + Parameters + ========== + model_instance: the string identifier of the model instance + should be in format [owner]/[model-name]/[framework]/[instance-slug] + page_token: token for pagination + page_size: the number of items per page + csv_display: if True, print comma separated values instead of table + """ + result = self.model_instance_files( + model_instance, + page_token=page_token, + page_size=page_size, + csv_display=csv_display) + if result and result.files is not None: + fields = ['name', 'size', 'creationDate'] + if csv_display: + self.print_csv(result.files, fields) + else: + self.print_table(result.files, fields) + def model_instance_update(self, folder): """ update a model instance. Parameters @@ -3170,9 +3352,8 @@ 'modelInstanceType', None) base_model_instance = self.get_or_default(meta_data, 'baseModelInstance', None) - external_base_model_url = self.get_or_default(meta_data, - 'externalBaseModelUrl', - None) + external_base_model_url = self.get_or_default( + meta_data, 'externalBaseModelUrl', None) # validations if owner_slug == 'INSERT_OWNER_SLUG_HERE': @@ -3271,8 +3452,8 @@ def model_instance_version_create(self, owner_slug, model_slug, framework, instance_slug = self.split_model_instance_string( model_instance) - request = ModelInstanceNewVersionRequest(version_notes=version_notes, - files=[]) + request = ModelInstanceNewVersionRequest( + version_notes=version_notes, files=[]) with ResumableUploadContext() as upload_context: self.upload_files(request, None, folder, ApiBlobType.MODEL, @@ -3302,9 +3483,8 @@ def model_instance_version_create_cli(self, quiet: suppress verbose output (default is False) dir_mode: what to do with directories: "skip" - ignore; "zip" - compress and upload """ - result = self.model_instance_version_create(model_instance, folder, - version_notes, quiet, - dir_mode) + result = self.model_instance_version_create( + model_instance, folder, version_notes, quiet, dir_mode) if result.hasId: print('Your model instance version was created.
Url={}'.format( @@ -3396,11 +3576,81 @@ def model_instance_version_download_cli(self, quiet: suppress verbose output (default is False) untar: if True, untar files upon download (default is False) """ - return self.model_instance_version_download(model_instance_version, - path=path, - untar=untar, - force=force, - quiet=quiet) + return self.model_instance_version_download( + model_instance_version, + path=path, + untar=untar, + force=force, + quiet=quiet) + + def model_instance_version_files(self, + model_instance_version, + page_token=None, + page_size=20, + csv_display=False): + """ list all files for a model instance version + + Parameters + ========== + model_instance_version: the string identifier of the model instance version + should be in format [owner]/[model-name]/[framework]/[instance-slug]/[version-number] + page_token: token for pagination + page_size: the number of items per page + csv_display: if True, print comma separated values instead of table + """ + if model_instance_version is None: + raise ValueError('A model_instance_version must be specified') + + self.validate_model_instance_version_string(model_instance_version) + urls = model_instance_version.split('/') + [owner_slug, model_slug, framework, instance_slug, + version_number] = urls + + response = self.process_response( + self.model_instance_version_files_with_http_info( + owner_slug=owner_slug, + model_slug=model_slug, + framework=framework, + instance_slug=instance_slug, + version_number=version_number, + page_size=page_size, + page_token=page_token, + _preload_content=True)) + + if response: + next_page_token = response['nextPageToken'] + if next_page_token: + print('Next Page Token = {}'.format(next_page_token)) + return FileList(response) + else: + print('No files found') + + def model_instance_version_files_cli(self, + model_instance_version, + page_token=None, + page_size=20, + csv_display=False): + """ client wrapper for model_instance_version_files. 
+ + Parameters + ========== + model_instance_version: the string identifier of the model instance version + should be in format [owner]/[model-name]/[framework]/[instance-slug]/[version-number] + page_token: token for pagination + page_size: the number of items per page + csv_display: if True, print comma separated values instead of table + """ + result = self.model_instance_version_files( + model_instance_version, + page_token=page_token, + page_size=page_size, + csv_display=csv_display) + if result and result.files is not None: + fields = ['name', 'size', 'creationDate'] + if csv_display: + self.print_csv(result.files, fields) + else: + self.print_table(result.files, fields) def model_instance_version_delete(self, model_instance_version, yes): """ call to delete a model instance version from the API @@ -3459,9 +3709,8 @@ def files_upload_cli(self, local_paths, inbox_path, no_resume, files_to_create = [] with ResumableUploadContext(no_resume) as upload_context: for local_path in local_paths: - (upload_file, - file_name) = self.file_upload_cli(local_path, inbox_path, - no_compress, upload_context) + (upload_file, file_name) = self.file_upload_cli( + local_path, inbox_path, no_compress, upload_context) if upload_file is None: continue @@ -3483,10 +3732,9 @@ def file_upload_cli(self, local_path, inbox_path, no_compress, file_or_folder_name = os.path.basename(full_path) dir_mode = 'tar' if no_compress else 'zip' - upload_file = self._upload_file_or_folder(parent_path, - file_or_folder_name, - ApiBlobType.INBOX, - upload_context, dir_mode) + upload_file = self._upload_file_or_folder( + parent_path, file_or_folder_name, ApiBlobType.INBOX, + upload_context, dir_mode) return (upload_file, file_or_folder_name) def print_obj(self, obj, indent=2): @@ -3533,9 +3781,11 @@ def print_table(self, items, fields): """ formats = [] borders = [] + if len(items) == 0: + return for f in fields: - length = max(len(f), - max([len(self.string(getattr(i, f))) for i in items])) + length = max( + len(f), max([len(self.string(getattr(i, f))) for i in items])) justify = '>' if isinstance(getattr( items[0], f), int) or f == 'size' or f == 'reward' else '<' formats.append('{:' + justify + self.string(length + 2) + '}') @@ -3687,10 +3937,9 @@ def upload_files(self, self.MODEL_INSTANCE_METADATA_FILE ]): continue - upload_file = self._upload_file_or_folder(folder, file_name, - blob_type, - upload_context, dir_mode, - quiet, resources) + upload_file = self._upload_file_or_folder( + folder, file_name, blob_type, upload_context, dir_mode, quiet, + resources) if upload_file is not None: request.files.append(upload_file) @@ -3772,9 +4021,9 @@ def process_column(self, column): ========== column: a list of values in a column to be processed """ - processed_column = DatasetColumn(name=self.get_or_fail(column, 'name'), - description=self.get_or_default( - column, 'description', '')) + processed_column = DatasetColumn( + name=self.get_or_fail(column, 'name'), + description=self.get_or_default(column, 'description', '')) if 'type' in column: original_type = column['type'].lower() processed_column.original_type = original_type @@ -3818,11 +4067,12 @@ def upload_complete(self, path, url, quiet, resume=False): start_at = resumable_upload_result.start_at upload_size = file_size - start_at - with tqdm(total=upload_size, - unit='B', - unit_scale=True, - unit_divisor=1024, - disable=quiet) as progress_bar: + with tqdm( + total=upload_size, + unit='B', + unit_scale=True, + unit_divisor=1024, + disable=quiet) as progress_bar: with
io.open(path, 'rb', buffering=0) as fp: session = requests.Session() if start_at > 0: @@ -3831,8 +4081,8 @@ def upload_complete(self, path, url, quiet, resume=False): 'Content-Length': '%d' % upload_size, 'Content-Range': - 'bytes %d-%d/%d' % - (start_at, file_size - 1, file_size) + 'bytes %d-%d/%d' % (start_at, file_size - 1, + file_size) }) reader = TqdmBufferedReader(fp, progress_bar) retries = Retry(total=10, backoff_factor=0.5) @@ -3938,7 +4188,7 @@ def split_dataset_string(self, dataset): else: return urls[0], urls[1], None else: - return self.get_config_value(self.CONFIG_NAME_USER), dataset + return self.get_config_value(self.CONFIG_NAME_USER), dataset, None def validate_model_string(self, model): """ determine if a model string is valid, meaning it is in the format @@ -4156,7 +4406,6 @@ def confirmation(self): class TqdmBufferedReader(io.BufferedReader): - def __init__(self, raw, progress_bar): """ helper class to implement an io.BufferedReader Parameters @@ -4183,3 +4432,24 @@ def increment(self, length): length: bytes to increment the reader by """ self.progress_bar.update(length) + + +class FileList(object): + def __init__(self, init_dict): + self.error_message = '' + files = init_dict['files'] + if files: + for f in files: + if 'size' in f: + f['totalBytes'] = f['size'] + self.files = [File(f) for f in files] + else: + self.files = [] + token = init_dict['nextPageToken'] + if token: + self.nextPageToken = token + else: + self.nextPageToken = "" + + def __repr__(self): + return '' diff --git a/kaggle/cli.py b/kaggle/cli.py index 186f39c..a5fea6e 100644 --- a/kaggle/cli.py +++ b/kaggle/cli.py @@ -44,14 +44,14 @@ def main(): parser = argparse.ArgumentParser( formatter_class=argparse.RawTextHelpFormatter) - parser.add_argument('-v', - '--version', - action='version', - version='Kaggle API ' + KaggleApi.__version__) - - subparsers = parser.add_subparsers(title='commands', - help=Help.kaggle, - dest='command') + parser.add_argument( + '-v', + '--version', + action='version', + version='Kaggle API ' + KaggleApi.__version__) + + subparsers = parser.add_subparsers( + title='commands', help=Help.kaggle, dest='command') subparsers.required = True subparsers.choices = Help.kaggle_choices parse_competitions(subparsers) @@ -137,22 +137,25 @@ def parse_competitions(subparsers): dest='sort_by', required=False, help=Help.param_competition_sort_by) - parser_competitions_list_optional.add_argument('-p', - '--page', - dest='page', - default=1, - required=False, - help=Help.param_page) - parser_competitions_list_optional.add_argument('-s', - '--search', - dest='search', - required=False, - help=Help.param_search) - parser_competitions_list_optional.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) + parser_competitions_list_optional.add_argument( + '-p', + '--page', + dest='page', + default=1, + required=False, + help=Help.param_page) + parser_competitions_list_optional.add_argument( + '-s', + '--search', + dest='search', + required=False, + help=Help.param_search) + parser_competitions_list_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) parser_competitions_list._action_groups.append( parser_competitions_list_optional) parser_competitions_list.set_defaults(func=api.competitions_list_cli) @@ -166,21 +169,35 @@ def parse_competitions(subparsers): ) parser_competitions_files_optional.add_argument( 'competition', nargs='?', default=None, help=Help.param_competition) - 
parser_competitions_files_optional.add_argument('-c', - '--competition', - dest='competition_opt', - required=False, - help=argparse.SUPPRESS) - parser_competitions_files_optional.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) - parser_competitions_files_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) + parser_competitions_files_optional.add_argument( + '-c', + '--competition', + dest='competition_opt', + required=False, + help=argparse.SUPPRESS) + parser_competitions_files_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_competitions_files_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + help=Help.param_quiet) + parser_competitions_files_optional.add_argument( + '--page-token', + dest='page_token', + required=False, + help=Help.param_page_token) + parser_competitions_files_optional.add_argument( + '--page-size', + dest='page_size', + required=False, + default=20, + help=Help.param_page_size) parser_competitions_files._action_groups.append( parser_competitions_files_optional) parser_competitions_files.set_defaults(func=api.competition_list_files_cli) @@ -194,11 +211,12 @@ def parse_competitions(subparsers): ) parser_competitions_download_optional.add_argument( 'competition', nargs='?', default=None, help=Help.param_competition) - parser_competitions_download_optional.add_argument('-c', - '--competition', - dest='competition_opt', - required=False, - help=argparse.SUPPRESS) + parser_competitions_download_optional.add_argument( + '-c', + '--competition', + dest='competition_opt', + required=False, + help=argparse.SUPPRESS) parser_competitions_download_optional.add_argument( '-f', '--file', @@ -211,23 +229,26 @@ def parse_competitions(subparsers): dest='path', required=False, help=Help.param_downfolder) - parser_competitions_download_optional.add_argument('-w', - '--wp', - dest='path', - action='store_const', - const='.', - required=False, - help=Help.param_wp) - parser_competitions_download_optional.add_argument('-o', - '--force', - dest='force', - action='store_true', - help=Help.param_force) - parser_competitions_download_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) + parser_competitions_download_optional.add_argument( + '-w', + '--wp', + dest='path', + action='store_const', + const='.', + required=False, + help=Help.param_wp) + parser_competitions_download_optional.add_argument( + '-o', + '--force', + dest='force', + action='store_true', + help=Help.param_force) + parser_competitions_download_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + help=Help.param_quiet) parser_competitions_download._action_groups.append( parser_competitions_download_optional) parser_competitions_download.set_defaults( @@ -244,27 +265,30 @@ def parse_competitions(subparsers): 'required arguments') parser_competitions_submit_optional.add_argument( 'competition', nargs='?', default=None, help=Help.param_competition) - parser_competitions_submit_optional.add_argument('-c', - '--competition', - dest='competition_opt', - required=False, - help=argparse.SUPPRESS) - parser_competitions_submit_required.add_argument('-f', - '--file', - dest='file_name', - required=True, - help=Help.param_upfile) + parser_competitions_submit_optional.add_argument( + '-c', + '--competition', + dest='competition_opt', + required=False, + 
help=argparse.SUPPRESS) + parser_competitions_submit_required.add_argument( + '-f', + '--file', + dest='file_name', + required=True, + help=Help.param_upfile) parser_competitions_submit_required.add_argument( '-m', '--message', dest='message', required=True, help=Help.param_competition_message) - parser_competitions_submit_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) + parser_competitions_submit_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + help=Help.param_quiet) parser_competitions_submit._action_groups.append( parser_competitions_submit_optional) parser_competitions_submit.set_defaults(func=api.competition_submit_cli) @@ -284,11 +308,12 @@ def parse_competitions(subparsers): dest='competition_opt', required=False, help=argparse.SUPPRESS) - parser_competitions_submissions_optional.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) + parser_competitions_submissions_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) parser_competitions_submissions_optional.add_argument( '-q', '--quiet', @@ -329,11 +354,12 @@ def parse_competitions(subparsers): help=Help.param_competition_leaderboard_download) parser_competitions_leaderboard_optional.add_argument( '-p', '--path', dest='path', help=Help.param_downfolder) - parser_competitions_leaderboard_optional.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) + parser_competitions_leaderboard_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) parser_competitions_leaderboard_optional.add_argument( '-q', '--quiet', @@ -358,8 +384,8 @@ def parse_datasets(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.group_datasets, aliases=['d']) - subparsers_datasets = parser_datasets.add_subparsers(title='commands', - dest='command') + subparsers_datasets = parser_datasets.add_subparsers( + title='commands', dest='command') subparsers_datasets.required = True subparsers_datasets.choices = Help.datasets_choices @@ -369,59 +395,58 @@ def parse_datasets(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_datasets_list) parser_datasets_list_optional = parser_datasets_list._action_groups.pop() - parser_datasets_list.add_argument('--sort-by', - dest='sort_by', - required=False, - help=Help.param_dataset_sort_by) - parser_datasets_list.add_argument('--size', - dest='size', - required=False, - help=Help.param_dataset_size) - parser_datasets_list.add_argument('--file-type', - dest='file_type', - required=False, - help=Help.param_dataset_file_type) - parser_datasets_list.add_argument('--license', - dest='license_name', - required=False, - help=Help.param_dataset_license) - parser_datasets_list.add_argument('--tags', - dest='tag_ids', - required=False, - help=Help.param_dataset_tags) - parser_datasets_list.add_argument('-s', - '--search', - dest='search', - required=False, - help=Help.param_search) - parser_datasets_list.add_argument('-m', - '--mine', - dest='mine', - action='store_true', - help=Help.param_mine) - parser_datasets_list.add_argument('--user', - dest='user', - required=False, - help=Help.param_dataset_user) - parser_datasets_list.add_argument('-p', - '--page', - dest='page', - default=1, - required=False, - help=Help.param_page) - parser_datasets_list.add_argument('-v', - '--csv', - dest='csv_display', - 
action='store_true', - help=Help.param_csv) - parser_datasets_list.add_argument('--max-size', - dest='max_size', - required=False, - help=Help.param_dataset_maxsize) - parser_datasets_list.add_argument('--min-size', - dest='min_size', - required=False, - help=Help.param_dataset_minsize) + parser_datasets_list.add_argument( + '--sort-by', + dest='sort_by', + required=False, + help=Help.param_dataset_sort_by) + parser_datasets_list.add_argument( + '--size', dest='size', required=False, help=Help.param_dataset_size) + parser_datasets_list.add_argument( + '--file-type', + dest='file_type', + required=False, + help=Help.param_dataset_file_type) + parser_datasets_list.add_argument( + '--license', + dest='license_name', + required=False, + help=Help.param_dataset_license) + parser_datasets_list.add_argument( + '--tags', dest='tag_ids', required=False, help=Help.param_dataset_tags) + parser_datasets_list.add_argument( + '-s', + '--search', + dest='search', + required=False, + help=Help.param_search) + parser_datasets_list.add_argument( + '-m', '--mine', dest='mine', action='store_true', help=Help.param_mine) + parser_datasets_list.add_argument( + '--user', dest='user', required=False, help=Help.param_dataset_user) + parser_datasets_list.add_argument( + '-p', + '--page', + dest='page', + default=1, + required=False, + help=Help.param_page) + parser_datasets_list.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_datasets_list.add_argument( + '--max-size', + dest='max_size', + required=False, + help=Help.param_dataset_maxsize) + parser_datasets_list.add_argument( + '--min-size', + dest='min_size', + required=False, + help=Help.param_dataset_minsize) parser_datasets_list._action_groups.append(parser_datasets_list_optional) parser_datasets_list.set_defaults(func=api.dataset_list_cli) @@ -431,20 +456,31 @@ def parse_datasets(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_datasets_files) parser_datasets_files_optional = parser_datasets_files._action_groups.pop() - parser_datasets_files_optional.add_argument('dataset', - nargs='?', - default=None, - help=Help.param_dataset) - parser_datasets_files_optional.add_argument('-d', - '--dataset', - dest='dataset_opt', - required=False, - help=argparse.SUPPRESS) - parser_datasets_files_optional.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) + parser_datasets_files_optional.add_argument( + 'dataset', nargs='?', default=None, help=Help.param_dataset) + parser_datasets_files_optional.add_argument( + '-d', + '--dataset', + dest='dataset_opt', + required=False, + help=argparse.SUPPRESS) + parser_datasets_files_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_datasets_files_optional.add_argument( + '--page-token', + dest='page_token', + required=False, + help=Help.param_page_token) + parser_datasets_files_optional.add_argument( + '--page-size', + dest='page_size', + required=False, + default=20, + help=Help.param_page_size) parser_datasets_files._action_groups.append(parser_datasets_files_optional) parser_datasets_files.set_defaults(func=api.dataset_list_files_cli) @@ -455,47 +491,48 @@ def parse_datasets(subparsers): help=Help.command_datasets_download) parser_datasets_download_optional = parser_datasets_download._action_groups.pop( ) - parser_datasets_download_optional.add_argument('dataset', - nargs='?', - default=None, - help=Help.param_dataset) - 
parser_datasets_download_optional.add_argument('-d', - '--dataset', - dest='dataset_opt', - required=False, - help=argparse.SUPPRESS) + parser_datasets_download_optional.add_argument( + 'dataset', nargs='?', default=None, help=Help.param_dataset) + parser_datasets_download_optional.add_argument( + '-d', + '--dataset', + dest='dataset_opt', + required=False, + help=argparse.SUPPRESS) parser_datasets_download_optional.add_argument( '-f', '--file', dest='file_name', required=False, help=Help.param_dataset_file) - parser_datasets_download_optional.add_argument('-p', - '--path', - dest='path', - required=False, - help=Help.param_downfolder) - parser_datasets_download_optional.add_argument('-w', - '--wp', - dest='path', - action='store_const', - const='.', - required=False, - help=Help.param_wp) - parser_datasets_download_optional.add_argument('--unzip', - dest='unzip', - action='store_true', - help=Help.param_unzip) - parser_datasets_download_optional.add_argument('-o', - '--force', - dest='force', - action='store_true', - help=Help.param_force) - parser_datasets_download_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) + parser_datasets_download_optional.add_argument( + '-p', + '--path', + dest='path', + required=False, + help=Help.param_downfolder) + parser_datasets_download_optional.add_argument( + '-w', + '--wp', + dest='path', + action='store_const', + const='.', + required=False, + help=Help.param_wp) + parser_datasets_download_optional.add_argument( + '--unzip', dest='unzip', action='store_true', help=Help.param_unzip) + parser_datasets_download_optional.add_argument( + '-o', + '--force', + dest='force', + action='store_true', + help=Help.param_force) + parser_datasets_download_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + help=Help.param_quiet) parser_datasets_download._action_groups.append( parser_datasets_download_optional) parser_datasets_download.set_defaults(func=api.dataset_download_cli) @@ -513,21 +550,24 @@ def parse_datasets(subparsers): dest='folder', required=False, help=Help.param_dataset_upfile) - parser_datasets_create_optional.add_argument('-u', - '--public', - dest='public', - action='store_true', - help=Help.param_public) - parser_datasets_create_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) - parser_datasets_create_optional.add_argument('-t', - '--keep-tabular', - dest='convert_to_csv', - action='store_false', - help=Help.param_keep_tabular) + parser_datasets_create_optional.add_argument( + '-u', + '--public', + dest='public', + action='store_true', + help=Help.param_public) + parser_datasets_create_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + help=Help.param_quiet) + parser_datasets_create_optional.add_argument( + '-t', + '--keep-tabular', + dest='convert_to_csv', + action='store_false', + help=Help.param_keep_tabular) parser_datasets_create_optional.add_argument( '-r', '--dir-mode', @@ -560,16 +600,18 @@ def parse_datasets(subparsers): dest='folder', required=False, help=Help.param_dataset_upfile) - parser_datasets_version_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) - parser_datasets_version_optional.add_argument('-t', - '--keep-tabular', - dest='convert_to_csv', - action='store_false', - help=Help.param_keep_tabular) + parser_datasets_version_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + 
action='store_true', + help=Help.param_quiet) + parser_datasets_version_optional.add_argument( + '-t', + '--keep-tabular', + dest='convert_to_csv', + action='store_false', + help=Help.param_keep_tabular) parser_datasets_version_optional.add_argument( '-r', '--dir-mode', @@ -593,11 +635,12 @@ def parse_datasets(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_datasets_init) parser_datasets_init_optional = parser_datasets_init._action_groups.pop() - parser_datasets_init_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_dataset_upfile) + parser_datasets_init_optional.add_argument( + '-p', + '--path', + dest='folder', + required=False, + help=Help.param_dataset_upfile) parser_datasets_init._action_groups.append(parser_datasets_init_optional) parser_datasets_init.set_defaults(func=api.dataset_initialize_cli) @@ -608,15 +651,14 @@ def parse_datasets(subparsers): help=Help.command_datasets_metadata) parser_datasets_metadata_optional = parser_datasets_metadata._action_groups.pop( ) - parser_datasets_metadata_optional.add_argument('dataset', - nargs='?', - default=None, - help=Help.param_dataset) - parser_datasets_metadata_optional.add_argument('-d', - '--dataset', - dest='dataset_opt', - required=False, - help=argparse.SUPPRESS) + parser_datasets_metadata_optional.add_argument( + 'dataset', nargs='?', default=None, help=Help.param_dataset) + parser_datasets_metadata_optional.add_argument( + '-d', + '--dataset', + dest='dataset_opt', + required=False, + help=argparse.SUPPRESS) parser_datasets_metadata_optional.add_argument( '--update', dest='update', @@ -635,15 +677,14 @@ def parse_datasets(subparsers): help=Help.command_datasets_status) parser_datasets_status_optional = parser_datasets_status._action_groups.pop( ) - parser_datasets_status_optional.add_argument('dataset', - nargs='?', - default=None, - help=Help.param_dataset) - parser_datasets_status_optional.add_argument('-d', - '--dataset', - dest='dataset_opt', - required=False, - help=argparse.SUPPRESS) + parser_datasets_status_optional.add_argument( + 'dataset', nargs='?', default=None, help=Help.param_dataset) + parser_datasets_status_optional.add_argument( + '-d', + '--dataset', + dest='dataset_opt', + required=False, + help=argparse.SUPPRESS) parser_datasets_status._action_groups.append( parser_datasets_status_optional) parser_datasets_status.set_defaults(func=api.dataset_status_cli) @@ -661,8 +702,8 @@ def parse_kernels(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.group_kernels, aliases=['k']) - subparsers_kernels = parser_kernels.add_subparsers(title='commands', - dest='command') + subparsers_kernels = parser_kernels.add_subparsers( + title='commands', dest='command') subparsers_kernels.required = True subparsers_kernels.choices = Help.kernels_choices @@ -672,77 +713,99 @@ def parse_kernels(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_kernels_list) parser_kernels_list_optional = parser_kernels_list._action_groups.pop() - parser_kernels_list_optional.add_argument('-m', - '--mine', - dest='mine', - action='store_true', - help=Help.param_mine) - parser_kernels_list_optional.add_argument('-p', - '--page', - dest='page', - default=1, - help=Help.param_page) - parser_kernels_list_optional.add_argument('--page-size', - dest='page_size', - default=20, - help=Help.param_page_size) - parser_kernels_list_optional.add_argument('-s', - '--search', - dest='search', - help=Help.param_search) - 
parser_kernels_list_optional.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) - parser_kernels_list_optional.add_argument('--parent', - dest='parent', - required=False, - help=Help.param_kernel_parent) + parser_kernels_list_optional.add_argument( + '-m', '--mine', dest='mine', action='store_true', help=Help.param_mine) + parser_kernels_list_optional.add_argument( + '-p', '--page', dest='page', default=1, help=Help.param_page) + parser_kernels_list_optional.add_argument( + '--page-size', dest='page_size', default=20, help=Help.param_page_size) + parser_kernels_list_optional.add_argument( + '-s', '--search', dest='search', help=Help.param_search) + parser_kernels_list_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_kernels_list_optional.add_argument( + '--parent', + dest='parent', + required=False, + help=Help.param_kernel_parent) parser_kernels_list_optional.add_argument( '--competition', dest='competition', required=False, help=Help.param_kernel_competition) - parser_kernels_list_optional.add_argument('--dataset', - dest='dataset', - required=False, - help=Help.param_kernel_dataset) - parser_kernels_list_optional.add_argument('--user', - dest='user', - required=False, - help=Help.param_kernel_user) - parser_kernels_list_optional.add_argument('--language', - dest='language', - required=False, - help=Help.param_kernel_language) - parser_kernels_list_optional.add_argument('--kernel-type', - dest='kernel_type', - required=False, - help=Help.param_kernel_type) + parser_kernels_list_optional.add_argument( + '--dataset', + dest='dataset', + required=False, + help=Help.param_kernel_dataset) + parser_kernels_list_optional.add_argument( + '--user', dest='user', required=False, help=Help.param_kernel_user) + parser_kernels_list_optional.add_argument( + '--language', + dest='language', + required=False, + help=Help.param_kernel_language) + parser_kernels_list_optional.add_argument( + '--kernel-type', + dest='kernel_type', + required=False, + help=Help.param_kernel_type) parser_kernels_list_optional.add_argument( '--output-type', dest='output_type', required=False, help=Help.param_kernel_output_type) - parser_kernels_list_optional.add_argument('--sort-by', - dest='sort_by', - required=False, - help=Help.param_kernel_sort_by) + parser_kernels_list_optional.add_argument( + '--sort-by', + dest='sort_by', + required=False, + help=Help.param_kernel_sort_by) parser_kernels_list._action_groups.append(parser_kernels_list_optional) parser_kernels_list.set_defaults(func=api.kernels_list_cli) + # Kernels file list + parser_kernels_files = subparsers_kernels.add_parser( + 'files', + formatter_class=argparse.RawTextHelpFormatter, + help=Help.command_kernels_files) + parser_kernels_files_optional = parser_kernels_files._action_groups.pop() + parser_kernels_files_optional.add_argument( + 'kernel', nargs='?', default=None, help=Help.param_kernel) + parser_kernels_files_optional.add_argument( + '-k', + '--kernel', + dest='kernel_opt', + required=False, + help=argparse.SUPPRESS) + parser_kernels_files_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_kernels_files_optional.add_argument( + '--page-token', dest='page_token', help=Help.param_page_token) + parser_kernels_files_optional.add_argument( + '--page-size', dest='page_size', default=20, help=Help.param_page_size) + 
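+ # Example invocations of the new subcommand (kernel slug is hypothetical): + # kaggle kernels files someuser/some-kernel --page-size 10 + # kaggle kernels files someuser/some-kernel --page-token <token> -v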
parser_kernels_files._action_groups.append(parser_kernels_files_optional) + parser_kernels_files.set_defaults(func=api.kernels_list_files_cli) + # Kernels init parser_kernels_init = subparsers_kernels.add_parser( 'init', formatter_class=argparse.RawTextHelpFormatter, help=Help.command_kernels_init) parser_kernels_init_optional = parser_kernels_init._action_groups.pop() - parser_kernels_init_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_kernel_upfile) + parser_kernels_init_optional.add_argument( + '-p', + '--path', + dest='folder', + required=False, + help=Help.param_kernel_upfile) parser_kernels_init._action_groups.append(parser_kernels_init_optional) parser_kernels_init.set_defaults(func=api.kernels_initialize_cli) @@ -752,11 +815,12 @@ def parse_kernels(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_kernels_push) parser_kernels_push_optional = parser_kernels_push._action_groups.pop() - parser_kernels_push_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_kernel_upfile) + parser_kernels_push_optional.add_argument( + '-p', + '--path', + dest='folder', + required=False, + help=Help.param_kernel_upfile) parser_kernels_push._action_groups.append(parser_kernels_push_optional) parser_kernels_push.set_defaults(func=api.kernels_push_cli) @@ -766,27 +830,28 @@ def parse_kernels(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_kernels_pull) parser_kernels_pull_optional = parser_kernels_pull._action_groups.pop() - parser_kernels_pull_optional.add_argument('kernel', - nargs='?', - default=None, - help=Help.param_kernel) - parser_kernels_pull_optional.add_argument('-k', - '--kernel', - dest='kernel', - required=False, - help=argparse.SUPPRESS) - parser_kernels_pull_optional.add_argument('-p', - '--path', - dest='path', - required=False, - help=Help.param_downfolder) - parser_kernels_pull_optional.add_argument('-w', - '--wp', - dest='path', - action='store_const', - const='.', - required=False, - help=Help.param_wp) + parser_kernels_pull_optional.add_argument( + 'kernel', nargs='?', default=None, help=Help.param_kernel) + parser_kernels_pull_optional.add_argument( + '-k', + '--kernel', + dest='kernel', + required=False, + help=argparse.SUPPRESS) + parser_kernels_pull_optional.add_argument( + '-p', + '--path', + dest='path', + required=False, + help=Help.param_downfolder) + parser_kernels_pull_optional.add_argument( + '-w', + '--wp', + dest='path', + action='store_const', + const='.', + required=False, + help=Help.param_wp) parser_kernels_pull_optional.add_argument( '-m', '--metadata', @@ -802,39 +867,42 @@ def parse_kernels(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_kernels_output) parser_kernels_output_optional = parser_kernels_output._action_groups.pop() - parser_kernels_output_optional.add_argument('kernel', - nargs='?', - default=None, - help=Help.param_kernel) - parser_kernels_output_optional.add_argument('-k', - '--kernel', - dest='kernel_opt', - required=False, - help=argparse.SUPPRESS) - parser_kernels_output_optional.add_argument('-p', - '--path', - dest='path', - required=False, - help=Help.param_downfolder) - parser_kernels_output_optional.add_argument('-w', - '--wp', - dest='path', - action='store_const', - const='.', - required=False, - help=Help.param_wp) - parser_kernels_output_optional.add_argument('-o', - '--force', - dest='force', - action='store_true', - required=False, - help=Help.param_force) - 
parser_kernels_output_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - required=False, - help=Help.param_quiet) + parser_kernels_output_optional.add_argument( + 'kernel', nargs='?', default=None, help=Help.param_kernel) + parser_kernels_output_optional.add_argument( + '-k', + '--kernel', + dest='kernel_opt', + required=False, + help=argparse.SUPPRESS) + parser_kernels_output_optional.add_argument( + '-p', + '--path', + dest='path', + required=False, + help=Help.param_downfolder) + parser_kernels_output_optional.add_argument( + '-w', + '--wp', + dest='path', + action='store_const', + const='.', + required=False, + help=Help.param_wp) + parser_kernels_output_optional.add_argument( + '-o', + '--force', + dest='force', + action='store_true', + required=False, + help=Help.param_force) + parser_kernels_output_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + required=False, + help=Help.param_quiet) parser_kernels_output._action_groups.append(parser_kernels_output_optional) parser_kernels_output.set_defaults(func=api.kernels_output_cli) @@ -844,15 +912,14 @@ def parse_kernels(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_kernels_status) parser_kernels_status_optional = parser_kernels_status._action_groups.pop() - parser_kernels_status_optional.add_argument('kernel', - nargs='?', - default=None, - help=Help.param_kernel) - parser_kernels_status_optional.add_argument('-k', - '--kernel', - dest='kernel_opt', - required=False, - help=argparse.SUPPRESS) + parser_kernels_status_optional.add_argument( + 'kernel', nargs='?', default=None, help=Help.param_kernel) + parser_kernels_status_optional.add_argument( + '-k', + '--kernel', + dest='kernel_opt', + required=False, + help=argparse.SUPPRESS) parser_kernels_status._action_groups.append(parser_kernels_status_optional) parser_kernels_status.set_defaults(func=api.kernels_status_cli) @@ -864,8 +931,8 @@ def parse_models(subparsers): help=Help.group_models, aliases=['m']) - subparsers_models = parser_models.add_subparsers(title='commands', - dest='command') + subparsers_models = parser_models.add_subparsers( + title='commands', dest='command') subparsers_models.required = True subparsers_models.choices = Help.models_choices @@ -879,11 +946,12 @@ def parse_models(subparsers): help=Help.command_models_get) parser_models_get_optional = parser_models_get._action_groups.pop() parser_models_get_optional.add_argument('model', help=Help.param_model) - parser_models_get_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_model_downfile) + parser_models_get_optional.add_argument( + '-p', + '--path', + dest='folder', + required=False, + help=Help.param_model_downfile) parser_models_get._action_groups.append(parser_models_get_optional) parser_models_get.set_defaults(func=api.model_get_cli) @@ -893,32 +961,32 @@ def parse_models(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_models_list) parser_models_list_optional = parser_models_list._action_groups.pop() - parser_models_list.add_argument('--sort-by', - dest='sort_by', - required=False, - help=Help.param_model_sort_by) - parser_models_list.add_argument('-s', - '--search', - dest='search', - required=False, - help=Help.param_search) - parser_models_list.add_argument('--owner', - dest='owner', - required=False, - help=Help.param_model_owner) - parser_models_list.add_argument('--page-size', - dest='page_size', - default=20, - help=Help.param_page_size) - 
parser_models_list.add_argument('--page-token', - dest='page_token', - required=False, - help=Help.param_page_token) - parser_models_list.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) + parser_models_list.add_argument( + '--sort-by', + dest='sort_by', + required=False, + help=Help.param_model_sort_by) + parser_models_list.add_argument( + '-s', + '--search', + dest='search', + required=False, + help=Help.param_search) + parser_models_list.add_argument( + '--owner', dest='owner', required=False, help=Help.param_model_owner) + parser_models_list.add_argument( + '--page-size', dest='page_size', default=20, help=Help.param_page_size) + parser_models_list.add_argument( + '--page-token', + dest='page_token', + required=False, + help=Help.param_page_token) + parser_models_list.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) parser_models_list._action_groups.append(parser_models_list_optional) parser_models_list.set_defaults(func=api.model_list_cli) @@ -928,11 +996,12 @@ def parse_models(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_models_init) parser_models_init_optional = parser_models_init._action_groups.pop() - parser_models_init_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_model_upfile) + parser_models_init_optional.add_argument( + '-p', + '--path', + dest='folder', + required=False, + help=Help.param_model_upfile) parser_models_init._action_groups.append(parser_models_init_optional) parser_models_init.set_defaults(func=api.model_initialize_cli) @@ -942,11 +1011,12 @@ def parse_models(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_models_new) parser_models_create_optional = parser_models_create._action_groups.pop() - parser_models_create_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_model_upfile) + parser_models_create_optional.add_argument( + '-p', + '--path', + dest='folder', + required=False, + help=Help.param_model_upfile) parser_models_create._action_groups.append(parser_models_create_optional) parser_models_create.set_defaults(func=api.model_create_new_cli) @@ -957,11 +1027,8 @@ def parse_models(subparsers): help=Help.command_models_delete) parser_models_delete_optional = parser_models_delete._action_groups.pop() parser_models_delete_optional.add_argument('model', help=Help.param_model) - parser_models_delete_optional.add_argument('-y', - '--yes', - dest='yes', - action='store_true', - help=Help.param_yes) + parser_models_delete_optional.add_argument( + '-y', '--yes', dest='yes', action='store_true', help=Help.param_yes) parser_models_delete._action_groups.append(parser_models_delete_optional) parser_models_delete.set_defaults(func=api.model_delete_cli) @@ -971,11 +1038,12 @@ def parse_models(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_models_update) parser_models_update_optional = parser_models_update._action_groups.pop() - parser_models_update_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_model_upfile) + parser_models_update_optional.add_argument( + '-p', + '--path', + dest='folder', + required=False, + help=Help.param_model_upfile) parser_models_update._action_groups.append(parser_models_update_optional) parser_models_update.set_defaults(func=api.model_update_cli) @@ -1045,11 +1113,12 @@ def parse_model_instances(subparsers): dest='folder', 
required=False, help=Help.param_model_instance_upfile) - parser_model_instances_create_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) + parser_model_instances_create_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + help=Help.param_quiet) parser_model_instances_create_optional.add_argument( '-r', '--dir-mode', @@ -1062,6 +1131,33 @@ def parse_model_instances(subparsers): parser_model_instances_create.set_defaults( func=api.model_instance_create_cli) + # Model Instances files + parser_model_instances_files = subparsers_model_instances.add_parser( + 'files', + formatter_class=argparse.RawTextHelpFormatter, + help=Help.command_model_instances_files) + parser_model_instances_files_optional = parser_model_instances_files._action_groups.pop( + ) + parser_model_instances_files_optional.add_argument( + 'model_instance', help=Help.param_model_instance) + parser_model_instances_files_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_model_instances_files_optional.add_argument( + '--page-size', dest='page_size', default=20, help=Help.param_page_size) + parser_model_instances_files_optional.add_argument( + '--page-token', + dest='page_token', + required=False, + help=Help.param_page_token) + parser_model_instances_files._action_groups.append( + parser_model_instances_files_optional) + parser_model_instances_files.set_defaults( + func=api.model_instance_files_cli) + # Models Instances delete parser_model_instances_delete = subparsers_model_instances.add_parser( 'delete', @@ -1071,11 +1167,8 @@ def parse_model_instances(subparsers): ) parser_model_instances_delete_optional.add_argument( 'model_instance', help=Help.param_model_instance) - parser_model_instances_delete_optional.add_argument('-y', - '--yes', - dest='yes', - action='store_true', - help=Help.param_yes) + parser_model_instances_delete_optional.add_argument( + '-y', '--yes', dest='yes', action='store_true', help=Help.param_yes) parser_model_instances_delete._action_groups.append( parser_model_instances_delete_optional) parser_model_instances_delete.set_defaults( @@ -1185,6 +1278,33 @@ def parse_model_instance_versions(subparsers): parser_model_instance_versions_download.set_defaults( func=api.model_instance_version_download_cli) + # Models Instance Versions files + parser_model_instance_versions_files = subparsers_model_intance_versions.add_parser( + 'files', + formatter_class=argparse.RawTextHelpFormatter, + help=Help.command_model_instance_versions_files) + parser_model_instance_versions_files_optional = parser_model_instance_versions_files._action_groups.pop( + ) + parser_model_instance_versions_files_optional.add_argument( + 'model_instance_version', help=Help.param_model_instance_version) + parser_model_instance_versions_files_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_model_instance_versions_files_optional.add_argument( + '--page-size', dest='page_size', default=20, help=Help.param_page_size) + parser_model_instance_versions_files_optional.add_argument( + '--page-token', + dest='page_token', + required=False, + help=Help.param_page_token) + parser_model_instance_versions_files._action_groups.append( + parser_model_instance_versions_files_optional) + parser_model_instance_versions_files.set_defaults( + func=api.model_instance_version_files_cli) + # Models Instance Versions delete 
parser_model_instance_versions_delete = subparsers_model_intance_versions.add_parser( 'delete', @@ -1209,8 +1329,8 @@ def parse_files(subparsers): help=Help.group_files, aliases=['f']) - subparsers_files = parser_files.add_subparsers(title='commands', - dest='command') + subparsers_files = parser_files.add_subparsers( + title='commands', dest='command') subparsers_files.required = True subparsers_files.choices = Help.files_choices @@ -1256,8 +1376,8 @@ def parse_config(subparsers): 'config', formatter_class=argparse.RawTextHelpFormatter, help=Help.group_config) - subparsers_config = parser_config.add_subparsers(title='commands', - dest='command') + subparsers_config = parser_config.add_subparsers( + title='commands', dest='command') subparsers_config.required = True subparsers_config.choices = Help.config_choices @@ -1274,16 +1394,18 @@ def parse_config(subparsers): parser_config_set._action_groups.pop() parser_config_set_required = parser_config_set.add_argument_group( 'required arguments') - parser_config_set_required.add_argument('-n', - '--name', - dest='name', - required=True, - help=Help.param_config_name) - parser_config_set_required.add_argument('-v', - '--value', - dest='value', - required=True, - help=Help.param_config_value) + parser_config_set_required.add_argument( + '-n', + '--name', + dest='name', + required=True, + help=Help.param_config_name) + parser_config_set_required.add_argument( + '-v', + '--value', + dest='value', + required=True, + help=Help.param_config_value) parser_config_set.set_defaults(func=api.set_config_value) parser_config_unset = subparsers_config.add_parser( @@ -1293,11 +1415,12 @@ def parse_config(subparsers): parser_config_unset._action_groups.pop() parser_config_unset_required = parser_config_unset.add_argument_group( 'required arguments') - parser_config_unset_required.add_argument('-n', - '--name', - dest='name', - required=True, - help=Help.param_config_name) + parser_config_unset_required.add_argument( + '-n', + '--name', + dest='name', + required=True, + help=Help.param_config_name) parser_config_unset.set_defaults(func=api.unset_config_value) @@ -1313,14 +1436,18 @@ class Help(object): 'list', 'files', 'download', 'create', 'version', 'init', 'metadata', 'status' ] - kernels_choices = ['list', 'init', 'push', 'pull', 'output', 'status'] + kernels_choices = [ + 'list', 'files', 'init', 'push', 'pull', 'output', 'status' + ] models_choices = [ 'instances', 'get', 'list', 'init', 'create', 'delete', 'update' ] model_instances_choices = [ - 'versions', 'get', 'init', 'create', 'delete', 'update' + 'versions', 'get', 'files', 'init', 'create', 'delete', 'update' + ] + model_instance_versions_choices = [ + 'init', 'create', 'download', 'delete', 'files' ] - model_instance_versions_choices = ['init', 'create', 'download', 'delete'] files_choices = ['upload'] config_choices = ['view', 'set', 'unset'] @@ -1365,6 +1492,7 @@ class Help(object): command_kernels_list = ( 'List available kernels. 
By default, shows 20 results sorted by ' 'hotness') + command_kernels_files = 'List kernel output files' command_kernels_init = 'Initialize metadata file for a kernel' command_kernels_push = 'Push new code to a kernel and run the kernel' command_kernels_pull = 'Pull down code from a kernel' @@ -1372,6 +1500,7 @@ class Help(object): command_kernels_status = 'Display the status of the latest kernel run' # Models commands + command_models_files = 'List model files' command_models_get = 'Get a model' command_models_list = 'List models' command_models_init = 'Initialize metadata file for model creation' @@ -1413,9 +1542,10 @@ class Help(object): param_upfile = 'File for upload (full path)' param_csv = 'Print results in CSV format (if not set print in table format)' param_page = 'Page number for results paging. Page size is 20 by default' + # NOTE: Default and max page size are set by the mid-tier code. param_page_size = ( 'Number of items to show on a page. Default size is 20, ' - 'max is 100') + 'max is 200') param_page_token = 'Page token for results paging.' param_search = 'Term(s) to search for' param_mine = 'Display only my items' @@ -1553,6 +1683,7 @@ class Help(object): ) command_model_instances_get = 'Get a model instance' command_model_instances_init = 'Initialize metadata file for model instance creation' + command_model_instances_files = 'List files for the current version of a model instance' command_model_instances_new = 'Create a new model instance' param_model_instance_downfile = ( 'Folder for downloading the special model-instance-metadata.json file ' @@ -1579,6 +1710,7 @@ class Help(object): 'Defaults to current working directory') command_model_instance_versions_delete = 'Delete a model instance version' command_model_instance_versions_download = 'Download model instance version files' + command_model_instance_versions_files = 'List model instance version files' param_model_instance_version_notes = 'Version notes to record for the new model instance version' # Files params @@ -1597,3 +1729,7 @@ class Help(object): ('Value of the configuration parameter, valid values ' 'depending on name\n- competition: ') + param_competition_nonempty + '\n- path: ' + param_downfolder + '\n- proxy: ' + param_proxy) + + +if __name__ == '__main__': + main() diff --git a/kaggle/models/kaggle_models_extended.py b/kaggle/models/kaggle_models_extended.py index c92fa50..f285f55 100644 --- a/kaggle/models/kaggle_models_extended.py +++ b/kaggle/models/kaggle_models_extended.py @@ -37,7 +37,6 @@ class Competition(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -48,7 +47,6 @@ def __repr__(self): class SubmitResult(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -58,7 +56,6 @@ def __repr__(self): class Submission(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -72,7 +69,6 @@ def __repr__(self): class LeaderboardEntry(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -82,7 +78,6 @@ def __repr__(self): class Dataset(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -96,7 +91,6 @@ def __repr__(self): class Model(object): - def __init__(self, init_dict): parsed_dict = {k: 
parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -106,7 +100,6 @@ def __repr__(self): class Metadata(object): - def __init__(self, init_info): parsed_info = {k: parse(v) for k, v in init_info.items()} # backwards compatibility @@ -119,7 +112,6 @@ def __repr__(self): class DatasetVersion(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -129,7 +121,6 @@ def __repr__(self): class File(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -149,7 +140,6 @@ def get_size(size, precision=0): class Tag(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -159,7 +149,6 @@ def __repr__(self): class DatasetNewVersionResponse(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -169,7 +158,6 @@ def __repr__(self): class DatasetNewResponse(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -179,7 +167,6 @@ def __repr__(self): class ListFilesResult(object): - def __init__(self, init_dict): self.error_message = init_dict['errorMessage'] files = init_dict['datasetFiles'] @@ -187,13 +174,17 @@ def __init__(self, init_dict): self.files = [File(f) for f in files] else: self.files = {} + token = init_dict['nextPageToken'] + if token: + self.nextPageToken = token + else: + self.nextPageToken = "" def __repr__(self): return self.error_message class Kernel: - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -203,7 +194,6 @@ def __repr__(self): class KernelPushResponse(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -213,7 +203,6 @@ def __repr__(self): class ModelNewResponse(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -223,7 +212,6 @@ def __repr__(self): class ModelDeleteResponse(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) diff --git a/kaggle/test/test_authenticate.py b/kaggle/test/test_authenticate.py index af8e92f..06d8787 100644 --- a/kaggle/test/test_authenticate.py +++ b/kaggle/test/test_authenticate.py @@ -23,7 +23,6 @@ class TestAuthenticate(unittest.TestCase): - def setUp(self): print("setup class:%s" % self) diff --git a/src/KaggleSwagger.yaml b/src/KaggleSwagger.yaml index 6a79a8b..a9ceefb 100644 --- a/src/KaggleSwagger.yaml +++ b/src/KaggleSwagger.yaml @@ -264,6 +264,17 @@ paths: required: true type: string description: Competition name + - in: query + name: pageToken + required: false + type: string + description: Page token for pagination + - in: query + name: pageSize + required: false + type: integer + default: 20 + description: Number of items per page (default 20) responses: 200: description: Result @@ -462,6 +473,17 @@ paths: required: false type: string description: Dataset version number + - in: query + name: pageToken + required: false + type: string + description: Page token for pagination + - in: query + name: pageSize + required: false + default: 20 + type: integer + description: Number of items per page 
(default 20) responses: 200: description: Result @@ -681,7 +703,7 @@ paths: name: pageSize type: integer default: 20 - description: Page size + description: Number of items per page (default 20) - in: query name: search default: "" @@ -753,6 +775,48 @@ paths: description: Error schema: $ref: "#/definitions/Error" + /kernels/files: + get: + tags: + - kaggle + summary: List kernel files + operationId: KernelsListFiles + produces: + - application/json + parameters: + - in: query + name: userName + required: true + type: string + description: Kernel owner + - in: query + name: kernelSlug + required: true + type: string + description: Kernel name + - in: query + name: kernelVersionNumber + required: false + type: string + description: Kernel version number + - in: query + name: pageSize + type: integer + default: 20 + description: Number of items per page (default 20) + - in: query + name: pageToken + type: string + description: Page token for pagination + responses: + 200: + description: Result + schema: + $ref: "#/definitions/Result" + default: + description: Error + schema: + $ref: "#/definitions/Error" /kernels/push: post: tags: @@ -991,8 +1055,55 @@ paths: - in: query name: pageSize type: integer - default: 1 - description: Page size + default: 20 + description: Number of items per page (default 20) + - in: query + name: pageToken + type: string + description: Page token for pagination + responses: + 200: + description: Result + schema: + $ref: "#/definitions/Result" + default: + description: Error + schema: + $ref: "#/definitions/Error" + /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/files: + get: + tags: + - kaggle + summary: List model instance files for the current version + operationId: ModelInstanceFiles + produces: + - application/json + parameters: + - in: path + name: ownerSlug + required: true + type: string + description: Model owner + - in: path + name: modelSlug + required: true + type: string + description: Model name + - in: path + name: framework + required: true + type: string + description: Model instance framework + - in: path + name: instanceSlug + required: true + type: string + description: Model instance slug + - in: query + name: pageSize + type: integer + default: 20 + description: Number of items per page (default 20) - in: query name: pageToken type: string @@ -1394,6 +1505,58 @@ paths: description: Error schema: $ref: "#/definitions/Error" + /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/{versionNumber}/files: + get: + tags: + - kaggle + summary: List model instance version files + operationId: ModelInstanceVersionFiles + produces: + - application/json + parameters: + - in: path + name: ownerSlug + required: true + type: string + description: Model owner + - in: path + name: modelSlug + required: true + type: string + description: Model name + - in: path + name: framework + required: true + type: string + description: Model instance framework + - in: path + name: instanceSlug + required: true + type: string + description: Model instance slug + - in: path + name: versionNumber + required: true + type: string + description: Model instance version number + - in: query + name: pageSize + type: integer + default: 20 + description: Number of items per page (default 20) + - in: query + name: pageToken + type: string + description: Page token for pagination + responses: + 200: + description: Result + schema: + $ref: "#/definitions/Result" + default: + description: Error + schema: + $ref: "#/definitions/Error" /blobs/upload: post: tags: diff 
--git a/src/kaggle/api/kaggle_api_extended.py b/src/kaggle/api/kaggle_api_extended.py index ec4938d..e7fd172 100644 --- a/src/kaggle/api/kaggle_api_extended.py +++ b/src/kaggle/api/kaggle_api_extended.py @@ -83,7 +83,6 @@ class DirectoryArchive(object): - def __init__(self, fullpath, format): self._fullpath = fullpath self._format = format @@ -93,8 +92,9 @@ def __init__(self, fullpath, format): def __enter__(self): self._temp_dir = tempfile.mkdtemp() _, dir_name = os.path.split(self._fullpath) - self.path = shutil.make_archive(os.path.join(self._temp_dir, dir_name), - self._format, self._fullpath) + self.path = shutil.make_archive( + os.path.join(self._temp_dir, dir_name), self._format, + self._fullpath) _, self.name = os.path.split(self.path) return self @@ -103,7 +103,6 @@ def __exit__(self, *args): class ResumableUploadContext(object): - def __init__(self, no_resume=False): self.no_resume = no_resume self._temp_dir = os.path.join(tempfile.gettempdir(), '.kaggle/uploads') @@ -349,7 +348,6 @@ def with_retry(self, initial_delay_millis=500, retry_multiplier=1.7, randomness_factor=0.5): - def retriable_func(*args): for i in range(1, max_retries + 1): try: @@ -359,9 +357,8 @@ def retriable_func(*args): total_delay = self._calculate_backoff_delay( i, initial_delay_millis, retry_multiplier, randomness_factor) - print( - 'Request failed: %s. Will retry in %2.1f seconds' % - (e, total_delay)) + print('Request failed: %s. Will retry in %2.1f seconds' + % (e, total_delay)) time.sleep(total_delay) continue raise @@ -389,8 +386,8 @@ def authenticate(self): or self.CONFIG_NAME_KEY not in config_data: if os.path.exists(self.config): config_data = self.read_config_file(config_data) - elif self._is_help_or_version_command(api_command) or (len( - sys.argv) > 2 and api_command.startswith( + elif self._is_help_or_version_command(api_command) or ( + len(sys.argv) > 2 and api_command.startswith( self.command_prefixes_allowing_anonymous_access)): # Some API commands should be allowed without authentication. return @@ -468,8 +465,8 @@ def _load_config(self, config_data): # Cert File if self.CONFIG_NAME_SSL_CA_CERT in config_data: - configuration.ssl_ca_cert = config_data[ - self.CONFIG_NAME_SSL_CA_CERT] + configuration.ssl_ca_cert = config_data[self. + CONFIG_NAME_SSL_CA_CERT] # Keep config values with class instance, and load api client! @@ -690,11 +687,12 @@ def competitions_list(self, str(self.valid_competition_sort_by)) competitions_list_result = self.process_response( - self.competitions_list_with_http_info(group=group or '', - category=category or '', - sort_by=sort_by or '', - page=page, - search=search or '')) + self.competitions_list_with_http_info( + group=group or '', + category=category or '', + sort_by=sort_by or '', + page=page, + search=search or '')) return [Competition(c) for c in competitions_list_result] def competitions_list_cli(self, @@ -715,11 +713,12 @@ def competitions_list_cli(self, search: a search term to use (default is empty string) csv_display: if True, print comma separated values """ - competitions = self.competitions_list(group=group, - category=category, - sort_by=sort_by, - page=page, - search=search) + competitions = self.competitions_list( + group=group, + category=category, + sort_by=sort_by, + page=page, + search=search) fields = [ 'ref', 'deadline', 'category', 'reward', 'teamCount', 'userHasEntered' @@ -772,9 +771,8 @@ def competition_submit(self, file_name, message, competition, quiet=False): upload_result_token = upload_result['token'] else: # New submissions path! 
- upload_status = self.upload_complete(file_name, - url_result['createUrl'], - quiet) + upload_status = self.upload_complete( + file_name, url_result['createUrl'], quiet) if upload_status != ResumableUploadResult.COMPLETE: # Actual error is printed during upload_complete. Not # ideal but changing would not be backwards compatible @@ -815,12 +813,17 @@ def competition_submit_cli(self, raise e return submit_result - def competition_submissions(self, competition): + def competition_submissions(self, + competition, + page_token=None, + page_size=20): """ get the list of submissions for a particular competition Parameters ========== competition: the name of the competition + page_token: token for pagination + page_size: the number of items per page """ submissions_result = self.process_response( self.competitions_submissions_list_with_http_info(id=competition)) @@ -830,6 +833,8 @@ def competition_submissions_cli(self, competition=None, competition_opt=None, csv_display=False, + page_token=None, + page_size=20, quiet=False): """ wrapper to competition_submissions, will return either json or csv to the user. Additional parameters are listed below, see @@ -840,6 +845,8 @@ def competition_submissions_cli(self, competition: the name of the competition. If None, look to config competition_opt: an alternative competition option provided by cli csv_display: if True, print comma separated values + page_token: token for pagination + page_size: the number of items per page quiet: suppress verbose output (default is False) """ competition = competition or competition_opt @@ -851,7 +858,8 @@ def competition_submissions_cli(self, if competition is None: raise ValueError('No competition specified') else: - submissions = self.competition_submissions(competition) + submissions = self.competition_submissions(competition, page_token, + page_size) fields = [ 'fileName', 'date', 'description', 'status', 'publicScore', 'privateScore' ] @@ -864,20 +872,28 @@ else: print('No submissions found') - def competition_list_files(self, competition): + def competition_list_files(self, + competition, + page_token=None, + page_size=20): """ list files for a competition Parameters ========== competition: the name of the competition + page_token: the page token for pagination + page_size: the number of items per page """ competition_list_files_result = self.process_response( - self.competitions_data_list_files_with_http_info(id=competition)) - return [File(f) for f in competition_list_files_result] + self.competitions_data_list_files_with_http_info( + id=competition, page_token=page_token, page_size=page_size)) + return FileList(competition_list_files_result) def competition_list_files_cli(self, competition, competition_opt=None, csv_display=False, + page_token=None, + page_size=20, quiet=False): """ List files for a competition, if it exists Parameters ========== competition: the name of the competition.
If None, look to config competition_opt: an alternative competition option provided by cli csv_display: if True, print comma separated values + page_token: the page token for pagination + page_size: the number of items per page quiet: suppress verbose output (default is False) """ competition = competition or competition_opt @@ -897,13 +915,17 @@ def competition_list_files_cli, if competition is None: raise ValueError('No competition specified') else: - files = self.competition_list_files(competition) + result = self.competition_list_files(competition, page_token, + page_size) + next_page_token = result.nextPageToken + if next_page_token: + print('Next Page Token = {}'.format(next_page_token)) fields = ['name', 'size', 'creationDate'] - if files: + if result: if csv_display: - self.print_csv(files, fields) + self.print_csv(result.files, fields) else: - self.print_table(files, fields) + self.print_table(result.files, fields) else: print('No files found') @@ -1125,9 +1147,8 @@ def dataset_list(self, ) if file_type and file_type not in self.valid_dataset_file_types: - raise ValueError( - 'Invalid file type specified. Valid options are ' + - str(self.valid_dataset_file_types)) + raise ValueError('Invalid file type specified. Valid options are ' + + str(self.valid_dataset_file_types)) if license_name and license_name not in self.valid_dataset_license_names: raise ValueError('Invalid license specified. Valid options are ' + @@ -1153,17 +1174,18 @@ def dataset_list(self, group = 'user' datasets_list_result = self.process_response( - self.datasets_list_with_http_info(group=group, - sort_by=sort_by or 'hottest', - size=size, - filetype=file_type or 'all', - license=license_name or 'all', - tagids=tag_ids or '', - search=search or '', - user=user or '', - page=page, - max_size=max_size, - min_size=min_size)) + self.datasets_list_with_http_info( + group=group, + sort_by=sort_by or 'hottest', + size=size, + filetype=file_type or 'all', + license=license_name or 'all', + tagids=tag_ids or '', + search=search or '', + user=user or '', + page=page, + max_size=max_size, + min_size=min_size)) return [Dataset(d) for d in datasets_list_result] def dataset_list_cli(self, @@ -1289,12 +1311,14 @@ def dataset_metadata_cli(self, dataset, path, update, dataset_opt=None): meta_file = self.dataset_metadata(dataset, path) print('Downloaded metadata to ' + meta_file) - def dataset_list_files(self, dataset): + def dataset_list_files(self, dataset, page_token=None, page_size=20): """ list files for a dataset Parameters ========== dataset: the string identifier of the dataset should be in format [owner]/[dataset-name] + page_token: the page token for pagination + page_size: the number of items per page """ if dataset is None: raise ValueError('A dataset must be specified') @@ -1305,13 +1329,17 @@ def dataset_list_files(self, dataset): self.datasets_list_files_with_http_info( owner_slug=owner_slug, dataset_slug=dataset_slug, - dataset_version_number=dataset_version_number)) + dataset_version_number=dataset_version_number, + page_token=page_token, + page_size=page_size)) return ListFilesResult(dataset_list_files_result) def dataset_list_files_cli(self, dataset, dataset_opt=None, - csv_display=False): + csv_display=False, + page_token=None, + page_size=20): """ a wrapper to dataset_list_files for the client (list files for a dataset) Parameters ========== dataset: the string identifier of the dataset should be in format [owner]/[dataset-name] dataset_opt: an alternative option to providing a dataset csv_display: if
True, print comma separated values instead of table + page_token: the page token for pagination + page_size: the number of items per page """ dataset = dataset or dataset_opt - result = self.dataset_list_files(dataset) + result = self.dataset_list_files(dataset, page_token, page_size) + if result: if result.error_message: print(result.error_message) else: + next_page_token = result.nextPageToken + if next_page_token: + print('Next Page Token = {}'.format(next_page_token)) fields = ['name', 'size', 'creationDate'] if csv_display: self.print_csv(result.files, fields) @@ -1353,8 +1387,8 @@ def dataset_status(self, dataset): owner_slug = self.get_config_value(self.CONFIG_NAME_USER) dataset_slug = dataset dataset_status_result = self.process_response( - self.datasets_status_with_http_info(owner_slug=owner_slug, - dataset_slug=dataset_slug)) + self.datasets_status_with_http_info( + owner_slug=owner_slug, dataset_slug=dataset_slug)) return dataset_status_result def dataset_status_cli(self, dataset, dataset_opt=None): @@ -1536,19 +1570,21 @@ def dataset_download_cli(self, ] if file_name is None: - self.dataset_download_files(dataset, - path=path, - unzip=unzip, - force=force, - quiet=quiet, - licenses=licenses) + self.dataset_download_files( + dataset, + path=path, + unzip=unzip, + force=force, + quiet=quiet, + licenses=licenses) else: - self.dataset_download_file(dataset, - file_name, - path=path, - force=force, - quiet=quiet, - licenses=licenses) + self.dataset_download_file( + dataset, + file_name, + path=path, + force=force, + quiet=quiet, + licenses=licenses) def _upload_blob(self, path, quiet, blob_type, upload_context): """ upload a file @@ -1709,9 +1745,8 @@ def dataset_create_version_cli(self, ('The following are not valid tags and could not be added to ' 'the dataset: ') + str(result.invalidTags)) elif result.status.lower() == 'ok': - print( - 'Dataset version is being created. Please check progress at ' + - result.url) + print('Dataset version is being created. Please check progress at ' + + result.url) else: print('Dataset version creation error: ' + result.error) @@ -1778,8 +1813,8 @@ def dataset_create_new(self, dataset_slug = ref_list[1] # validations - if ref == self.config_values[ - self.CONFIG_NAME_USER] + '/INSERT_SLUG_HERE': + if ref == self.config_values[self. 
+ CONFIG_NAME_USER] + '/INSERT_SLUG_HERE': raise ValueError( 'Default slug detected, please change values before uploading') if title == 'INSERT_TITLE_HERE': @@ -1807,16 +1842,17 @@ def dataset_create_new(self, raise ValueError( 'Subtitle length must be between 20 and 80 characters') - request = DatasetNewRequest(title=title, - slug=dataset_slug, - owner_slug=owner_slug, - license_name=license_name, - subtitle=subtitle, - description=description, - files=[], - is_private=not public, - convert_to_csv=convert_to_csv, - category_ids=keywords) + request = DatasetNewRequest( + title=title, + slug=dataset_slug, + owner_slug=owner_slug, + license_name=license_name, + subtitle=subtitle, + description=description, + files=[], + is_private=not public, + convert_to_csv=convert_to_csv, + category_ids=keywords) with ResumableUploadContext() as upload_context: self.upload_files(request, resources, folder, ApiBlobType.DATASET, @@ -1911,28 +1947,30 @@ def download_file(self, headers={'Range': 'bytes=%d-' % (size_read, )}, _preload_content=False) - with tqdm(total=size, - initial=size_read, - unit='B', - unit_scale=True, - unit_divisor=1024, - disable=quiet) as pbar: + with tqdm( + total=size, + initial=size_read, + unit='B', + unit_scale=True, + unit_divisor=1024, + disable=quiet) as pbar: with open(outfile, open_mode) as out: while True: data = response.read(chunk_size) if not data: break out.write(data) - os.utime(outfile, - times=(remote_date_timestamp - 1, - remote_date_timestamp - 1)) + os.utime( + outfile, + times=(remote_date_timestamp - 1, + remote_date_timestamp - 1)) size_read = min(size, size_read + chunk_size) pbar.update(len(data)) if not quiet: print('\n', end='') - os.utime(outfile, - times=(remote_date_timestamp, remote_date_timestamp)) + os.utime( + outfile, times=(remote_date_timestamp, remote_date_timestamp)) def kernels_list(self, page=1, @@ -2003,18 +2041,19 @@ def kernels_list(self, group = 'profile' kernels_list_result = self.process_response( - self.kernels_list_with_http_info(page=page, - page_size=page_size, - group=group, - user=user or '', - language=language or 'all', - kernel_type=kernel_type or 'all', - output_type=output_type or 'all', - sort_by=sort_by or 'hotness', - dataset=dataset or '', - competition=competition or '', - parent_kernel=parent_kernel or '', - search=search or '')) + self.kernels_list_with_http_info( + page=page, + page_size=page_size, + group=group, + user=user or '', + language=language or 'all', + kernel_type=kernel_type or 'all', + output_type=output_type or 'all', + sort_by=sort_by or 'hotness', + dataset=dataset or '', + competition=competition or '', + parent_kernel=parent_kernel or '', + search=search or '')) return [Kernel(k) for k in kernels_list_result] def kernels_list_cli(self, @@ -2037,18 +2076,19 @@ def kernels_list_cli(self, ========== csv_display: if True, print comma separated values instead of table """ - kernels = self.kernels_list(page=page, - page_size=page_size, - search=search, - mine=mine, - dataset=dataset, - competition=competition, - parent_kernel=parent, - user=user, - language=language, - kernel_type=kernel_type, - output_type=output_type, - sort_by=sort_by) + kernels = self.kernels_list( + page=page, + page_size=page_size, + search=search, + mine=mine, + dataset=dataset, + competition=competition, + parent_kernel=parent, + user=user, + language=language, + kernel_type=kernel_type, + output_type=output_type, + sort_by=sort_by) fields = ['ref', 'title', 'author', 'lastRunTime', 'totalVotes'] if kernels: if csv_display: @@ 
-2058,6 +2098,65 @@ def kernels_list_cli, else: print('Not found') + def kernels_list_files(self, kernel, page_token=None, page_size=20): + """ list files for a kernel + Parameters + ========== + kernel: the string identifier of the kernel + should be in format [owner]/[kernel-name] + page_token: the page token for pagination + page_size: the number of items per page + """ + if kernel is None: + raise ValueError('A kernel must be specified') + user_name, kernel_slug, kernel_version_number = self.split_dataset_string( + kernel) + + kernels_list_files_result = self.process_response( + self.kernels_list_files_with_http_info( + kernel_slug=kernel_slug, + user_name=user_name, + page_token=page_token, + page_size=page_size)) + return FileList(kernels_list_files_result) + + def kernels_list_files_cli(self, + kernel, + kernel_opt=None, + csv_display=False, + page_token=None, + page_size=20): + """ a wrapper to kernels_list_files for the client + (list files for a kernel) + Parameters + ========== + kernel: the string identifier of the kernel + should be in format [owner]/[kernel-name] + kernel_opt: an alternative option to providing a kernel + csv_display: if True, print comma separated values instead of table + page_token: the page token for pagination + page_size: the number of items per page + """ + kernel = kernel or kernel_opt + result = self.kernels_list_files(kernel, page_token, page_size) + + if result is None: + print('No files found') + return + + if result.error_message: + print(result.error_message) + return + + next_page_token = result.nextPageToken + if next_page_token: + print('Next Page Token = {}'.format(next_page_token)) + fields = ['name', 'size', 'creationDate'] + if csv_display: + self.print_csv(result.files, fields) + else: + self.print_table(result.files, fields) + def kernels_initialize(self, folder): """ create a new kernel in a specified folder from template, including json metadata that grabs values from the configuration.
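The `kernels_list_files` / `FileList` pair above establishes the pagination contract that this change repeats for competitions, datasets, and model instances: each call returns one page of files plus a `nextPageToken` that the code treats as empty on the last page. A minimal consumption sketch, assuming an authenticated client and a placeholder kernel handle:

```python
# Sketch only: walk every page of a kernel's output files via the
# nextPageToken contract added above. The kernel handle is a placeholder.
from kaggle.api.kaggle_api_extended import KaggleApi

api = KaggleApi()
api.authenticate()

page_token = None
while True:
    result = api.kernels_list_files('someuser/some-kernel',
                                    page_token=page_token,
                                    page_size=20)
    if result.error_message:
        print(result.error_message)
        break
    for f in result.files:
        print(f.name, f.size)
    page_token = result.nextPageToken
    if not page_token:  # empty string means no further pages
        break
```

The CLI wrappers print `Next Page Token = ...`, so the same continuation can be driven from the command line by feeding that value back through `--page-token`.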
@@ -2081,11 +2180,11 @@ def kernels_initialize(self, folder): 'code_file': 'INSERT_CODE_FILE_PATH_HERE', 'language': - 'Pick one of: {' + - ','.join(x for x in self.valid_push_language_types) + '}', + 'Pick one of: {' + ','.join( + x for x in self.valid_push_language_types) + '}', 'kernel_type': - 'Pick one of: {' + - ','.join(x for x in self.valid_push_kernel_types) + '}', + 'Pick one of: {' + ','.join( + x for x in self.valid_push_kernel_types) + '}', 'is_private': 'true', 'enable_gpu': @@ -2195,9 +2294,8 @@ def kernels_push(self, folder): for source in model_sources: self.validate_model_string(source) - docker_pinning_type = self.get_or_default(meta_data, - 'docker_image_pinning_type', - None) + docker_pinning_type = self.get_or_default( + meta_data, 'docker_image_pinning_type', None) if (docker_pinning_type is not None and docker_pinning_type not in self.valid_push_pinning_types): raise ValueError( @@ -2371,8 +2469,8 @@ def kernels_pull(self, kernel, path, metadata=False, quiet=True): if file_name is None: print( 'Unknown language %s + kernel type %s - please report this ' - 'on the kaggle-api github issues' % - (language, kernel_type)) + 'on the kaggle-api github issues' % (language, + kernel_type)) print( 'Saving as a python file, even though this may not be the ' 'correct language') @@ -2428,10 +2526,8 @@ def kernels_pull_cli(self, """ client wrapper for kernels_pull """ kernel = kernel or kernel_opt - effective_path = self.kernels_pull(kernel, - path=path, - metadata=metadata, - quiet=False) + effective_path = self.kernels_pull( + kernel, path=path, metadata=metadata, quiet=False) if metadata: print('Source code and metadata downloaded to ' + effective_path) else: @@ -2613,14 +2709,15 @@ def model_list(self, raise ValueError('Page size must be >= 1') models_list_result = self.process_response( - self.models_list_with_http_info(sort_by=sort_by or 'hotness', - search=search or '', - owner=owner or '', - page_size=page_size, - page_token=page_token)) + self.models_list_with_http_info( + sort_by=sort_by or 'hotness', + search=search or '', + owner=owner or '', + page_size=page_size, + page_token=page_token)) next_page_token = models_list_result['nextPageToken'] - if next_page_token != '': + if next_page_token: print('Next Page Token = {}'.format(next_page_token)) return [Model(m) for m in models_list_result['models']] @@ -2664,12 +2761,18 @@ def model_initialize(self, folder): raise ValueError('Invalid folder: ' + folder) meta_data = { - 'ownerSlug': 'INSERT_OWNER_SLUG_HERE', - 'title': 'INSERT_TITLE_HERE', - 'slug': 'INSERT_SLUG_HERE', - 'subtitle': '', - 'isPrivate': True, - 'description': '''# Model Summary + 'ownerSlug': + 'INSERT_OWNER_SLUG_HERE', + 'title': + 'INSERT_TITLE_HERE', + 'slug': + 'INSERT_SLUG_HERE', + 'subtitle': + '', + 'isPrivate': + True, + 'description': + '''# Model Summary # Model Characteristics @@ -2677,8 +2780,10 @@ def model_initialize(self, folder): # Evaluation Results ''', - 'publishTime': '', - 'provenanceSources': '' + 'publishTime': + '', + 'provenanceSources': + '' } meta_file = os.path.join(folder, self.MODEL_METADATA_FILE) with open(meta_file, 'w') as f: @@ -2732,14 +2837,15 @@ def model_create_new(self, folder): if publish_time: self.validate_date(publish_time) - request = ModelNewRequest(owner_slug=owner_slug, - slug=slug, - title=title, - subtitle=subtitle, - is_private=is_private, - description=description, - publish_time=publish_time, - provenance_sources=provenance_sources) + request = ModelNewRequest( + owner_slug=owner_slug, + slug=slug, + 
title=title, + subtitle=subtitle, + is_private=is_private, + description=description, + publish_time=publish_time, + provenance_sources=provenance_sources) result = ModelNewResponse( self.process_response( self.models_create_new_with_http_info(request))) @@ -2851,13 +2957,14 @@ def model_update(self, folder): if provenance_sources != None: update_mask['paths'].append('provenance_sources') - request = ModelUpdateRequest(title=title, - subtitle=subtitle, - is_private=is_private, - description=description, - publish_time=publish_time, - provenance_sources=provenance_sources, - update_mask=update_mask) + request = ModelUpdateRequest( + title=title, + subtitle=subtitle, + is_private=is_private, + description=description, + publish_time=publish_time, + provenance_sources=provenance_sources, + update_mask=update_mask) result = ModelNewResponse( self.process_response( self.update_model_with_http_info(owner_slug, slug, request))) @@ -2949,12 +3056,18 @@ def model_instance_initialize(self, folder): raise ValueError('Invalid folder: ' + folder) meta_data = { - 'ownerSlug': 'INSERT_OWNER_SLUG_HERE', - 'modelSlug': 'INSERT_EXISTING_MODEL_SLUG_HERE', - 'instanceSlug': 'INSERT_INSTANCE_SLUG_HERE', - 'framework': 'INSERT_FRAMEWORK_HERE', - 'overview': '', - 'usage': '''# Model Format + 'ownerSlug': + 'INSERT_OWNER_SLUG_HERE', + 'modelSlug': + 'INSERT_EXISTING_MODEL_SLUG_HERE', + 'instanceSlug': + 'INSERT_INSTANCE_SLUG_HERE', + 'framework': + 'INSERT_FRAMEWORK_HERE', + 'overview': + '', + 'usage': + '''# Model Format # Training Data @@ -2968,12 +3081,17 @@ def model_instance_initialize(self, folder): # Changelog ''', - 'licenseName': 'Apache 2.0', - 'fineTunable': False, + 'licenseName': + 'Apache 2.0', + 'fineTunable': + False, 'trainingData': [], - 'modelInstanceType': 'Unspecified', - 'baseModelInstanceId': 0, - 'externalBaseModelUrl': '' + 'modelInstanceType': + 'Unspecified', + 'baseModelInstanceId': + 0, + 'externalBaseModelUrl': + '' } meta_file = os.path.join(folder, self.MODEL_INSTANCE_METADATA_FILE) with open(meta_file, 'w') as f: @@ -3013,14 +3131,12 @@ def model_instance_create(self, folder, quiet=False, dir_mode='skip'): license_name = self.get_or_fail(meta_data, 'licenseName') fine_tunable = self.get_or_default(meta_data, 'fineTunable', False) training_data = self.get_or_default(meta_data, 'trainingData', []) - model_instance_type = self.get_or_default(meta_data, - 'modelInstanceType', - 'Unspecified') + model_instance_type = self.get_or_default( + meta_data, 'modelInstanceType', 'Unspecified') base_model_instance = self.get_or_default(meta_data, 'baseModelInstance', '') - external_base_model_url = self.get_or_default(meta_data, - 'externalBaseModelUrl', - '') + external_base_model_url = self.get_or_default( + meta_data, 'externalBaseModelUrl', '') # validations if owner_slug == 'INSERT_OWNER_SLUG_HERE': @@ -3065,9 +3181,8 @@ def model_instance_create(self, folder, quiet=False, dir_mode='skip'): result = ModelNewResponse( self.process_response( self.with_retry( - self.models_create_instance_with_http_info)(owner_slug, - model_slug, - request))) + self.models_create_instance_with_http_info)( + owner_slug, model_slug, request))) return result @@ -3127,6 +3242,73 @@ def model_instance_delete_cli(self, model_instance, yes): else: print('The model instance was deleted.') + def model_instance_files(self, + model_instance, + page_token=None, + page_size=20, + csv_display=False): + """ list all files for the current version of a model instance + + Parameters + ========== + model_instance: the string 
identifier of the model instance + should be in format [owner]/[model-name]/[framework]/[instance-slug] + page_token: token for pagination + page_size: the number of items per page + csv_display: if True, print comma separated values instead of table + """ + if model_instance is None: + raise ValueError('A model_instance must be specified') + + self.validate_model_instance_string(model_instance) + urls = model_instance.split('/') + [owner_slug, model_slug, framework, instance_slug] = urls + + response = self.process_response( + self.model_instance_files_with_http_info( + owner_slug=owner_slug, + model_slug=model_slug, + framework=framework, + instance_slug=instance_slug, + page_size=page_size, + page_token=page_token, + _preload_content=True)) + + if response: + next_page_token = response['nextPageToken'] + if next_page_token: + print('Next Page Token = {}'.format(next_page_token)) + return FileList(response) + else: + print('No files found') + + def model_instance_files_cli(self, + model_instance, + page_token=None, + page_size=20, + csv_display=False): + """ client wrapper for model_instance_files. + + Parameters + ========== + model_instance: the string identifier of the model instance + should be in format [owner]/[model-name]/[framework]/[instance-slug] + page_token: token for pagination + page_size: the number of items per page + csv_display: if True, print comma separated values instead of table + """ + result = self.model_instance_files( + model_instance, + page_token=page_token, + page_size=page_size, + csv_display=csv_display) + if result and result.files is not None: + fields = ['name', 'size', 'creationDate'] + if csv_display: + self.print_csv(result.files, fields) + else: + self.print_table(result.files, fields) + def model_instance_update(self, folder): """ update a model instance. Parameters @@ -3154,9 +3336,8 @@ def model_instance_update(self, folder): 'modelInstanceType', None) base_model_instance = self.get_or_default(meta_data, 'baseModelInstance', None) - external_base_model_url = self.get_or_default(meta_data, - 'externalBaseModelUrl', - None) + external_base_model_url = self.get_or_default( + meta_data, 'externalBaseModelUrl', None) # validations if owner_slug == 'INSERT_OWNER_SLUG_HERE': @@ -3255,8 +3436,8 @@ def model_instance_version_create(self, owner_slug, model_slug, framework, instance_slug = self.split_model_instance_string( model_instance) - request = ModelInstanceNewVersionRequest(version_notes=version_notes, - files=[]) + request = ModelInstanceNewVersionRequest( + version_notes=version_notes, files=[]) with ResumableUploadContext() as upload_context: self.upload_files(request, None, folder, ApiBlobType.MODEL, @@ -3286,9 +3467,8 @@ def model_instance_version_create_cli(self, quiet: suppress verbose output (default is False) dir_mode: what to do with directories: "skip" - ignore; "zip" - compress and upload """ - result = self.model_instance_version_create(model_instance, folder, - version_notes, quiet, - dir_mode) + result = self.model_instance_version_create( + model_instance, folder, version_notes, quiet, dir_mode) if result.hasId: print('Your model instance version was created.
Url={}'.format( @@ -3380,11 +3560,81 @@ def model_instance_version_download_cli(self, quiet: suppress verbose output (default is False) untar: if True, untar files upon download (default is False) """ - return self.model_instance_version_download(model_instance_version, - path=path, - untar=untar, - force=force, - quiet=quiet) + return self.model_instance_version_download( + model_instance_version, + path=path, + untar=untar, + force=force, + quiet=quiet) + + def model_instance_version_files(self, + model_instance_version, + page_token=None, + page_size=20, + csv_display=False): + """ list all files for a model instance version + + Parameters + ========== + model_instance_version: the string identifier of the model instance version + should be in format [owner]/[model-name]/[framework]/[instance-slug]/[version-number] + page_token: token for pagination + page_size: the number of items per page + csv_display: if True, print comma separated values instead of table + """ + if model_instance_version is None: + raise ValueError('A model_instance_version must be specified') + + self.validate_model_instance_version_string(model_instance_version) + urls = model_instance_version.split('/') + [owner_slug, model_slug, framework, instance_slug, + version_number] = urls + + response = self.process_response( + self.model_instance_version_files_with_http_info( + owner_slug=owner_slug, + model_slug=model_slug, + framework=framework, + instance_slug=instance_slug, + version_number=version_number, + page_size=page_size, + page_token=page_token, + _preload_content=True)) + + if response: + next_page_token = response['nextPageToken'] + if next_page_token: + print('Next Page Token = {}'.format(next_page_token)) + return FileList(response) + else: + print('No files found') + + def model_instance_version_files_cli(self, + model_instance_version, + page_token=None, + page_size=20, + csv_display=False): + """ client wrapper for model_instance_version_files. 
+ + Parameters + ========== + model_instance_version: the string identifier of the model instance version + should be in format [owner]/[model-name]/[framework]/[instance-slug]/[version-number] + page_token: token for pagination + page_size: the number of items per page + csv_display: if True, print comma separated values instead of table + """ + result = self.model_instance_version_files( + model_instance_version, + page_token=page_token, + page_size=page_size, + csv_display=csv_display) + if result and result.files is not None: + fields = ['name', 'size', 'creationDate'] + if csv_display: + self.print_csv(result.files, fields) + else: + self.print_table(result.files, fields) def model_instance_version_delete(self, model_instance_version, yes): """ call to delete a model instance version from the API @@ -3443,9 +3693,8 @@ def files_upload_cli(self, local_paths, inbox_path, no_resume, files_to_create = [] with ResumableUploadContext(no_resume) as upload_context: for local_path in local_paths: - (upload_file, - file_name) = self.file_upload_cli(local_path, inbox_path, - no_compress, upload_context) + (upload_file, file_name) = self.file_upload_cli( + local_path, inbox_path, no_compress, upload_context) if upload_file is None: continue @@ -3467,10 +3716,9 @@ def file_upload_cli(self, local_path, inbox_path, no_compress, file_or_folder_name = os.path.basename(full_path) dir_mode = 'tar' if no_compress else 'zip' - upload_file = self._upload_file_or_folder(parent_path, - file_or_folder_name, - ApiBlobType.INBOX, - upload_context, dir_mode) + upload_file = self._upload_file_or_folder( + parent_path, file_or_folder_name, ApiBlobType.INBOX, + upload_context, dir_mode) return (upload_file, file_or_folder_name) def print_obj(self, obj, indent=2): @@ -3517,9 +3765,11 @@ def print_table(self, items, fields): """ formats = [] borders = [] + if len(items) == 0: + return for f in fields: - length = max(len(f), - max([len(self.string(getattr(i, f))) for i in items])) + length = max( + len(f), max([len(self.string(getattr(i, f))) for i in items])) justify = '>' if isinstance(getattr( items[0], f), int) or f == 'size' or f == 'reward' else '<' formats.append('{:' + justify + self.string(length + 2) + '}') @@ -3671,10 +3921,9 @@ def upload_files(self, self.MODEL_INSTANCE_METADATA_FILE ]): continue - upload_file = self._upload_file_or_folder(folder, file_name, - blob_type, - upload_context, dir_mode, - quiet, resources) + upload_file = self._upload_file_or_folder( + folder, file_name, blob_type, upload_context, dir_mode, quiet, + resources) if upload_file is not None: request.files.append(upload_file) @@ -3756,9 +4005,9 @@ def process_column(self, column): ========== column: a list of values in a column to be processed """ - processed_column = DatasetColumn(name=self.get_or_fail(column, 'name'), - description=self.get_or_default( - column, 'description', '')) + processed_column = DatasetColumn( + name=self.get_or_fail(column, 'name'), + description=self.get_or_default(column, 'description', '')) if 'type' in column: original_type = column['type'].lower() processed_column.original_type = original_type @@ -3802,11 +4051,12 @@ def upload_complete(self, path, url, quiet, resume=False): start_at = resumable_upload_result.start_at upload_size = file_size - start_at - with tqdm(total=upload_size, - unit='B', - unit_scale=True, - unit_divisor=1024, - disable=quiet) as progress_bar: + with tqdm( + total=upload_size, + unit='B', + unit_scale=True, + unit_divisor=1024, + disable=quiet) as progress_bar: with
io.open(path, 'rb', buffering=0) as fp: session = requests.Session() if start_at > 0: @@ -3815,8 +4065,8 @@ def upload_complete(self, path, url, quiet, resume=False): 'Content-Length': '%d' % upload_size, 'Content-Range': - 'bytes %d-%d/%d' % - (start_at, file_size - 1, file_size) + 'bytes %d-%d/%d' % (start_at, file_size - 1, + file_size) }) reader = TqdmBufferedReader(fp, progress_bar) retries = Retry(total=10, backoff_factor=0.5) @@ -3922,7 +4172,7 @@ def split_dataset_string(self, dataset): else: return urls[0], urls[1], None else: - return self.get_config_value(self.CONFIG_NAME_USER), dataset + return self.get_config_value(self.CONFIG_NAME_USER), dataset, None def validate_model_string(self, model): """ determine if a model string is valid, meaning it is in the format @@ -4140,7 +4390,6 @@ def confirmation(self): class TqdmBufferedReader(io.BufferedReader): - def __init__(self, raw, progress_bar): """ helper class to implement an io.BufferedReader Parameters @@ -4167,3 +4416,24 @@ def increment(self, length): length: bytes to increment the reader by """ self.progress_bar.update(length) + + +class FileList(object): + def __init__(self, init_dict): + self.error_message = '' + files = init_dict['files'] + if files: + for f in files: + if 'size' in f: + f['totalBytes'] = f['size'] + self.files = [File(f) for f in files] + else: + self.files = [] + token = init_dict['nextPageToken'] + if token: + self.nextPageToken = token + else: + self.nextPageToken = "" + + def __repr__(self): + return '' diff --git a/src/kaggle/cli.py b/src/kaggle/cli.py index e03ea72..0fe5e65 100644 --- a/src/kaggle/cli.py +++ b/src/kaggle/cli.py @@ -28,14 +28,14 @@ def main(): parser = argparse.ArgumentParser( formatter_class=argparse.RawTextHelpFormatter) - parser.add_argument('-v', - '--version', - action='version', - version='Kaggle API ' + KaggleApi.__version__) - - subparsers = parser.add_subparsers(title='commands', - help=Help.kaggle, - dest='command') + parser.add_argument( + '-v', + '--version', + action='version', + version='Kaggle API ' + KaggleApi.__version__) + + subparsers = parser.add_subparsers( + title='commands', help=Help.kaggle, dest='command') subparsers.required = True subparsers.choices = Help.kaggle_choices parse_competitions(subparsers) @@ -121,22 +121,25 @@ def parse_competitions(subparsers): dest='sort_by', required=False, help=Help.param_competition_sort_by) - parser_competitions_list_optional.add_argument('-p', - '--page', - dest='page', - default=1, - required=False, - help=Help.param_page) - parser_competitions_list_optional.add_argument('-s', - '--search', - dest='search', - required=False, - help=Help.param_search) - parser_competitions_list_optional.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) + parser_competitions_list_optional.add_argument( + '-p', + '--page', + dest='page', + default=1, + required=False, + help=Help.param_page) + parser_competitions_list_optional.add_argument( + '-s', + '--search', + dest='search', + required=False, + help=Help.param_search) + parser_competitions_list_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) parser_competitions_list._action_groups.append( parser_competitions_list_optional) parser_competitions_list.set_defaults(func=api.competitions_list_cli) @@ -150,21 +153,35 @@ def parse_competitions(subparsers): ) parser_competitions_files_optional.add_argument( 'competition', nargs='?', default=None, help=Help.param_competition) - 
parser_competitions_files_optional.add_argument('-c', - '--competition', - dest='competition_opt', - required=False, - help=argparse.SUPPRESS) - parser_competitions_files_optional.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) - parser_competitions_files_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) + parser_competitions_files_optional.add_argument( + '-c', + '--competition', + dest='competition_opt', + required=False, + help=argparse.SUPPRESS) + parser_competitions_files_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_competitions_files_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + help=Help.param_quiet) + parser_competitions_files_optional.add_argument( + '--page-token', + dest='page_token', + required=False, + help=Help.param_page_token) + parser_competitions_files_optional.add_argument( + '--page-size', + dest='page_size', + required=False, + default=20, + help=Help.param_page_size) parser_competitions_files._action_groups.append( parser_competitions_files_optional) parser_competitions_files.set_defaults(func=api.competition_list_files_cli) @@ -178,11 +195,12 @@ def parse_competitions(subparsers): ) parser_competitions_download_optional.add_argument( 'competition', nargs='?', default=None, help=Help.param_competition) - parser_competitions_download_optional.add_argument('-c', - '--competition', - dest='competition_opt', - required=False, - help=argparse.SUPPRESS) + parser_competitions_download_optional.add_argument( + '-c', + '--competition', + dest='competition_opt', + required=False, + help=argparse.SUPPRESS) parser_competitions_download_optional.add_argument( '-f', '--file', @@ -195,23 +213,26 @@ def parse_competitions(subparsers): dest='path', required=False, help=Help.param_downfolder) - parser_competitions_download_optional.add_argument('-w', - '--wp', - dest='path', - action='store_const', - const='.', - required=False, - help=Help.param_wp) - parser_competitions_download_optional.add_argument('-o', - '--force', - dest='force', - action='store_true', - help=Help.param_force) - parser_competitions_download_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) + parser_competitions_download_optional.add_argument( + '-w', + '--wp', + dest='path', + action='store_const', + const='.', + required=False, + help=Help.param_wp) + parser_competitions_download_optional.add_argument( + '-o', + '--force', + dest='force', + action='store_true', + help=Help.param_force) + parser_competitions_download_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + help=Help.param_quiet) parser_competitions_download._action_groups.append( parser_competitions_download_optional) parser_competitions_download.set_defaults( @@ -228,27 +249,30 @@ def parse_competitions(subparsers): 'required arguments') parser_competitions_submit_optional.add_argument( 'competition', nargs='?', default=None, help=Help.param_competition) - parser_competitions_submit_optional.add_argument('-c', - '--competition', - dest='competition_opt', - required=False, - help=argparse.SUPPRESS) - parser_competitions_submit_required.add_argument('-f', - '--file', - dest='file_name', - required=True, - help=Help.param_upfile) + parser_competitions_submit_optional.add_argument( + '-c', + '--competition', + dest='competition_opt', + required=False, + 
help=argparse.SUPPRESS) + parser_competitions_submit_required.add_argument( + '-f', + '--file', + dest='file_name', + required=True, + help=Help.param_upfile) parser_competitions_submit_required.add_argument( '-m', '--message', dest='message', required=True, help=Help.param_competition_message) - parser_competitions_submit_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) + parser_competitions_submit_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + help=Help.param_quiet) parser_competitions_submit._action_groups.append( parser_competitions_submit_optional) parser_competitions_submit.set_defaults(func=api.competition_submit_cli) @@ -268,11 +292,12 @@ def parse_competitions(subparsers): dest='competition_opt', required=False, help=argparse.SUPPRESS) - parser_competitions_submissions_optional.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) + parser_competitions_submissions_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) parser_competitions_submissions_optional.add_argument( '-q', '--quiet', @@ -313,11 +338,12 @@ def parse_competitions(subparsers): help=Help.param_competition_leaderboard_download) parser_competitions_leaderboard_optional.add_argument( '-p', '--path', dest='path', help=Help.param_downfolder) - parser_competitions_leaderboard_optional.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) + parser_competitions_leaderboard_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) parser_competitions_leaderboard_optional.add_argument( '-q', '--quiet', @@ -342,8 +368,8 @@ def parse_datasets(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.group_datasets, aliases=['d']) - subparsers_datasets = parser_datasets.add_subparsers(title='commands', - dest='command') + subparsers_datasets = parser_datasets.add_subparsers( + title='commands', dest='command') subparsers_datasets.required = True subparsers_datasets.choices = Help.datasets_choices @@ -353,59 +379,58 @@ def parse_datasets(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_datasets_list) parser_datasets_list_optional = parser_datasets_list._action_groups.pop() - parser_datasets_list.add_argument('--sort-by', - dest='sort_by', - required=False, - help=Help.param_dataset_sort_by) - parser_datasets_list.add_argument('--size', - dest='size', - required=False, - help=Help.param_dataset_size) - parser_datasets_list.add_argument('--file-type', - dest='file_type', - required=False, - help=Help.param_dataset_file_type) - parser_datasets_list.add_argument('--license', - dest='license_name', - required=False, - help=Help.param_dataset_license) - parser_datasets_list.add_argument('--tags', - dest='tag_ids', - required=False, - help=Help.param_dataset_tags) - parser_datasets_list.add_argument('-s', - '--search', - dest='search', - required=False, - help=Help.param_search) - parser_datasets_list.add_argument('-m', - '--mine', - dest='mine', - action='store_true', - help=Help.param_mine) - parser_datasets_list.add_argument('--user', - dest='user', - required=False, - help=Help.param_dataset_user) - parser_datasets_list.add_argument('-p', - '--page', - dest='page', - default=1, - required=False, - help=Help.param_page) - parser_datasets_list.add_argument('-v', - '--csv', - dest='csv_display', - 
action='store_true', - help=Help.param_csv) - parser_datasets_list.add_argument('--max-size', - dest='max_size', - required=False, - help=Help.param_dataset_maxsize) - parser_datasets_list.add_argument('--min-size', - dest='min_size', - required=False, - help=Help.param_dataset_minsize) + parser_datasets_list.add_argument( + '--sort-by', + dest='sort_by', + required=False, + help=Help.param_dataset_sort_by) + parser_datasets_list.add_argument( + '--size', dest='size', required=False, help=Help.param_dataset_size) + parser_datasets_list.add_argument( + '--file-type', + dest='file_type', + required=False, + help=Help.param_dataset_file_type) + parser_datasets_list.add_argument( + '--license', + dest='license_name', + required=False, + help=Help.param_dataset_license) + parser_datasets_list.add_argument( + '--tags', dest='tag_ids', required=False, help=Help.param_dataset_tags) + parser_datasets_list.add_argument( + '-s', + '--search', + dest='search', + required=False, + help=Help.param_search) + parser_datasets_list.add_argument( + '-m', '--mine', dest='mine', action='store_true', help=Help.param_mine) + parser_datasets_list.add_argument( + '--user', dest='user', required=False, help=Help.param_dataset_user) + parser_datasets_list.add_argument( + '-p', + '--page', + dest='page', + default=1, + required=False, + help=Help.param_page) + parser_datasets_list.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_datasets_list.add_argument( + '--max-size', + dest='max_size', + required=False, + help=Help.param_dataset_maxsize) + parser_datasets_list.add_argument( + '--min-size', + dest='min_size', + required=False, + help=Help.param_dataset_minsize) parser_datasets_list._action_groups.append(parser_datasets_list_optional) parser_datasets_list.set_defaults(func=api.dataset_list_cli) @@ -415,20 +440,31 @@ def parse_datasets(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_datasets_files) parser_datasets_files_optional = parser_datasets_files._action_groups.pop() - parser_datasets_files_optional.add_argument('dataset', - nargs='?', - default=None, - help=Help.param_dataset) - parser_datasets_files_optional.add_argument('-d', - '--dataset', - dest='dataset_opt', - required=False, - help=argparse.SUPPRESS) - parser_datasets_files_optional.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) + parser_datasets_files_optional.add_argument( + 'dataset', nargs='?', default=None, help=Help.param_dataset) + parser_datasets_files_optional.add_argument( + '-d', + '--dataset', + dest='dataset_opt', + required=False, + help=argparse.SUPPRESS) + parser_datasets_files_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_datasets_files_optional.add_argument( + '--page-token', + dest='page_token', + required=False, + help=Help.param_page_token) + parser_datasets_files_optional.add_argument( + '--page-size', + dest='page_size', + required=False, + default=20, + help=Help.param_page_size) parser_datasets_files._action_groups.append(parser_datasets_files_optional) parser_datasets_files.set_defaults(func=api.dataset_list_files_cli) @@ -439,47 +475,48 @@ def parse_datasets(subparsers): help=Help.command_datasets_download) parser_datasets_download_optional = parser_datasets_download._action_groups.pop( ) - parser_datasets_download_optional.add_argument('dataset', - nargs='?', - default=None, - help=Help.param_dataset) - 
parser_datasets_download_optional.add_argument('-d', - '--dataset', - dest='dataset_opt', - required=False, - help=argparse.SUPPRESS) + parser_datasets_download_optional.add_argument( + 'dataset', nargs='?', default=None, help=Help.param_dataset) + parser_datasets_download_optional.add_argument( + '-d', + '--dataset', + dest='dataset_opt', + required=False, + help=argparse.SUPPRESS) parser_datasets_download_optional.add_argument( '-f', '--file', dest='file_name', required=False, help=Help.param_dataset_file) - parser_datasets_download_optional.add_argument('-p', - '--path', - dest='path', - required=False, - help=Help.param_downfolder) - parser_datasets_download_optional.add_argument('-w', - '--wp', - dest='path', - action='store_const', - const='.', - required=False, - help=Help.param_wp) - parser_datasets_download_optional.add_argument('--unzip', - dest='unzip', - action='store_true', - help=Help.param_unzip) - parser_datasets_download_optional.add_argument('-o', - '--force', - dest='force', - action='store_true', - help=Help.param_force) - parser_datasets_download_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) + parser_datasets_download_optional.add_argument( + '-p', + '--path', + dest='path', + required=False, + help=Help.param_downfolder) + parser_datasets_download_optional.add_argument( + '-w', + '--wp', + dest='path', + action='store_const', + const='.', + required=False, + help=Help.param_wp) + parser_datasets_download_optional.add_argument( + '--unzip', dest='unzip', action='store_true', help=Help.param_unzip) + parser_datasets_download_optional.add_argument( + '-o', + '--force', + dest='force', + action='store_true', + help=Help.param_force) + parser_datasets_download_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + help=Help.param_quiet) parser_datasets_download._action_groups.append( parser_datasets_download_optional) parser_datasets_download.set_defaults(func=api.dataset_download_cli) @@ -497,21 +534,24 @@ def parse_datasets(subparsers): dest='folder', required=False, help=Help.param_dataset_upfile) - parser_datasets_create_optional.add_argument('-u', - '--public', - dest='public', - action='store_true', - help=Help.param_public) - parser_datasets_create_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) - parser_datasets_create_optional.add_argument('-t', - '--keep-tabular', - dest='convert_to_csv', - action='store_false', - help=Help.param_keep_tabular) + parser_datasets_create_optional.add_argument( + '-u', + '--public', + dest='public', + action='store_true', + help=Help.param_public) + parser_datasets_create_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + help=Help.param_quiet) + parser_datasets_create_optional.add_argument( + '-t', + '--keep-tabular', + dest='convert_to_csv', + action='store_false', + help=Help.param_keep_tabular) parser_datasets_create_optional.add_argument( '-r', '--dir-mode', @@ -544,16 +584,18 @@ def parse_datasets(subparsers): dest='folder', required=False, help=Help.param_dataset_upfile) - parser_datasets_version_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) - parser_datasets_version_optional.add_argument('-t', - '--keep-tabular', - dest='convert_to_csv', - action='store_false', - help=Help.param_keep_tabular) + parser_datasets_version_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + 
action='store_true', + help=Help.param_quiet) + parser_datasets_version_optional.add_argument( + '-t', + '--keep-tabular', + dest='convert_to_csv', + action='store_false', + help=Help.param_keep_tabular) parser_datasets_version_optional.add_argument( '-r', '--dir-mode', @@ -577,11 +619,12 @@ def parse_datasets(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_datasets_init) parser_datasets_init_optional = parser_datasets_init._action_groups.pop() - parser_datasets_init_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_dataset_upfile) + parser_datasets_init_optional.add_argument( + '-p', + '--path', + dest='folder', + required=False, + help=Help.param_dataset_upfile) parser_datasets_init._action_groups.append(parser_datasets_init_optional) parser_datasets_init.set_defaults(func=api.dataset_initialize_cli) @@ -592,15 +635,14 @@ def parse_datasets(subparsers): help=Help.command_datasets_metadata) parser_datasets_metadata_optional = parser_datasets_metadata._action_groups.pop( ) - parser_datasets_metadata_optional.add_argument('dataset', - nargs='?', - default=None, - help=Help.param_dataset) - parser_datasets_metadata_optional.add_argument('-d', - '--dataset', - dest='dataset_opt', - required=False, - help=argparse.SUPPRESS) + parser_datasets_metadata_optional.add_argument( + 'dataset', nargs='?', default=None, help=Help.param_dataset) + parser_datasets_metadata_optional.add_argument( + '-d', + '--dataset', + dest='dataset_opt', + required=False, + help=argparse.SUPPRESS) parser_datasets_metadata_optional.add_argument( '--update', dest='update', @@ -619,15 +661,14 @@ def parse_datasets(subparsers): help=Help.command_datasets_status) parser_datasets_status_optional = parser_datasets_status._action_groups.pop( ) - parser_datasets_status_optional.add_argument('dataset', - nargs='?', - default=None, - help=Help.param_dataset) - parser_datasets_status_optional.add_argument('-d', - '--dataset', - dest='dataset_opt', - required=False, - help=argparse.SUPPRESS) + parser_datasets_status_optional.add_argument( + 'dataset', nargs='?', default=None, help=Help.param_dataset) + parser_datasets_status_optional.add_argument( + '-d', + '--dataset', + dest='dataset_opt', + required=False, + help=argparse.SUPPRESS) parser_datasets_status._action_groups.append( parser_datasets_status_optional) parser_datasets_status.set_defaults(func=api.dataset_status_cli) @@ -645,8 +686,8 @@ def parse_kernels(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.group_kernels, aliases=['k']) - subparsers_kernels = parser_kernels.add_subparsers(title='commands', - dest='command') + subparsers_kernels = parser_kernels.add_subparsers( + title='commands', dest='command') subparsers_kernels.required = True subparsers_kernels.choices = Help.kernels_choices @@ -656,77 +697,99 @@ def parse_kernels(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_kernels_list) parser_kernels_list_optional = parser_kernels_list._action_groups.pop() - parser_kernels_list_optional.add_argument('-m', - '--mine', - dest='mine', - action='store_true', - help=Help.param_mine) - parser_kernels_list_optional.add_argument('-p', - '--page', - dest='page', - default=1, - help=Help.param_page) - parser_kernels_list_optional.add_argument('--page-size', - dest='page_size', - default=20, - help=Help.param_page_size) - parser_kernels_list_optional.add_argument('-s', - '--search', - dest='search', - help=Help.param_search) - 
parser_kernels_list_optional.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) - parser_kernels_list_optional.add_argument('--parent', - dest='parent', - required=False, - help=Help.param_kernel_parent) + parser_kernels_list_optional.add_argument( + '-m', '--mine', dest='mine', action='store_true', help=Help.param_mine) + parser_kernels_list_optional.add_argument( + '-p', '--page', dest='page', default=1, help=Help.param_page) + parser_kernels_list_optional.add_argument( + '--page-size', dest='page_size', default=20, help=Help.param_page_size) + parser_kernels_list_optional.add_argument( + '-s', '--search', dest='search', help=Help.param_search) + parser_kernels_list_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_kernels_list_optional.add_argument( + '--parent', + dest='parent', + required=False, + help=Help.param_kernel_parent) parser_kernels_list_optional.add_argument( '--competition', dest='competition', required=False, help=Help.param_kernel_competition) - parser_kernels_list_optional.add_argument('--dataset', - dest='dataset', - required=False, - help=Help.param_kernel_dataset) - parser_kernels_list_optional.add_argument('--user', - dest='user', - required=False, - help=Help.param_kernel_user) - parser_kernels_list_optional.add_argument('--language', - dest='language', - required=False, - help=Help.param_kernel_language) - parser_kernels_list_optional.add_argument('--kernel-type', - dest='kernel_type', - required=False, - help=Help.param_kernel_type) + parser_kernels_list_optional.add_argument( + '--dataset', + dest='dataset', + required=False, + help=Help.param_kernel_dataset) + parser_kernels_list_optional.add_argument( + '--user', dest='user', required=False, help=Help.param_kernel_user) + parser_kernels_list_optional.add_argument( + '--language', + dest='language', + required=False, + help=Help.param_kernel_language) + parser_kernels_list_optional.add_argument( + '--kernel-type', + dest='kernel_type', + required=False, + help=Help.param_kernel_type) parser_kernels_list_optional.add_argument( '--output-type', dest='output_type', required=False, help=Help.param_kernel_output_type) - parser_kernels_list_optional.add_argument('--sort-by', - dest='sort_by', - required=False, - help=Help.param_kernel_sort_by) + parser_kernels_list_optional.add_argument( + '--sort-by', + dest='sort_by', + required=False, + help=Help.param_kernel_sort_by) parser_kernels_list._action_groups.append(parser_kernels_list_optional) parser_kernels_list.set_defaults(func=api.kernels_list_cli) + # Kernels file list + parser_kernels_files = subparsers_kernels.add_parser( + 'files', + formatter_class=argparse.RawTextHelpFormatter, + help=Help.command_kernels_files) + parser_kernels_files_optional = parser_kernels_files._action_groups.pop() + parser_kernels_files_optional.add_argument( + 'kernel', nargs='?', default=None, help=Help.param_kernel) + parser_kernels_files_optional.add_argument( + '-k', + '--kernel', + dest='kernel_opt', + required=False, + help=argparse.SUPPRESS) + parser_kernels_files_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_kernels_files_optional.add_argument( + '--page-token', dest='page_token', help=Help.param_page_token) + parser_kernels_files_optional.add_argument( + '--page-size', dest='page_size', default=20, help=Help.param_page_size) + 
parser_kernels_files._action_groups.append(parser_kernels_files_optional) + parser_kernels_files.set_defaults(func=api.kernels_list_files_cli) + # Kernels init parser_kernels_init = subparsers_kernels.add_parser( 'init', formatter_class=argparse.RawTextHelpFormatter, help=Help.command_kernels_init) parser_kernels_init_optional = parser_kernels_init._action_groups.pop() - parser_kernels_init_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_kernel_upfile) + parser_kernels_init_optional.add_argument( + '-p', + '--path', + dest='folder', + required=False, + help=Help.param_kernel_upfile) parser_kernels_init._action_groups.append(parser_kernels_init_optional) parser_kernels_init.set_defaults(func=api.kernels_initialize_cli) @@ -736,11 +799,12 @@ def parse_kernels(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_kernels_push) parser_kernels_push_optional = parser_kernels_push._action_groups.pop() - parser_kernels_push_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_kernel_upfile) + parser_kernels_push_optional.add_argument( + '-p', + '--path', + dest='folder', + required=False, + help=Help.param_kernel_upfile) parser_kernels_push._action_groups.append(parser_kernels_push_optional) parser_kernels_push.set_defaults(func=api.kernels_push_cli) @@ -750,27 +814,28 @@ def parse_kernels(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_kernels_pull) parser_kernels_pull_optional = parser_kernels_pull._action_groups.pop() - parser_kernels_pull_optional.add_argument('kernel', - nargs='?', - default=None, - help=Help.param_kernel) - parser_kernels_pull_optional.add_argument('-k', - '--kernel', - dest='kernel', - required=False, - help=argparse.SUPPRESS) - parser_kernels_pull_optional.add_argument('-p', - '--path', - dest='path', - required=False, - help=Help.param_downfolder) - parser_kernels_pull_optional.add_argument('-w', - '--wp', - dest='path', - action='store_const', - const='.', - required=False, - help=Help.param_wp) + parser_kernels_pull_optional.add_argument( + 'kernel', nargs='?', default=None, help=Help.param_kernel) + parser_kernels_pull_optional.add_argument( + '-k', + '--kernel', + dest='kernel', + required=False, + help=argparse.SUPPRESS) + parser_kernels_pull_optional.add_argument( + '-p', + '--path', + dest='path', + required=False, + help=Help.param_downfolder) + parser_kernels_pull_optional.add_argument( + '-w', + '--wp', + dest='path', + action='store_const', + const='.', + required=False, + help=Help.param_wp) parser_kernels_pull_optional.add_argument( '-m', '--metadata', @@ -786,39 +851,42 @@ def parse_kernels(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_kernels_output) parser_kernels_output_optional = parser_kernels_output._action_groups.pop() - parser_kernels_output_optional.add_argument('kernel', - nargs='?', - default=None, - help=Help.param_kernel) - parser_kernels_output_optional.add_argument('-k', - '--kernel', - dest='kernel_opt', - required=False, - help=argparse.SUPPRESS) - parser_kernels_output_optional.add_argument('-p', - '--path', - dest='path', - required=False, - help=Help.param_downfolder) - parser_kernels_output_optional.add_argument('-w', - '--wp', - dest='path', - action='store_const', - const='.', - required=False, - help=Help.param_wp) - parser_kernels_output_optional.add_argument('-o', - '--force', - dest='force', - action='store_true', - required=False, - help=Help.param_force) - 
parser_kernels_output_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - required=False, - help=Help.param_quiet) + parser_kernels_output_optional.add_argument( + 'kernel', nargs='?', default=None, help=Help.param_kernel) + parser_kernels_output_optional.add_argument( + '-k', + '--kernel', + dest='kernel_opt', + required=False, + help=argparse.SUPPRESS) + parser_kernels_output_optional.add_argument( + '-p', + '--path', + dest='path', + required=False, + help=Help.param_downfolder) + parser_kernels_output_optional.add_argument( + '-w', + '--wp', + dest='path', + action='store_const', + const='.', + required=False, + help=Help.param_wp) + parser_kernels_output_optional.add_argument( + '-o', + '--force', + dest='force', + action='store_true', + required=False, + help=Help.param_force) + parser_kernels_output_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + required=False, + help=Help.param_quiet) parser_kernels_output._action_groups.append(parser_kernels_output_optional) parser_kernels_output.set_defaults(func=api.kernels_output_cli) @@ -828,15 +896,14 @@ def parse_kernels(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_kernels_status) parser_kernels_status_optional = parser_kernels_status._action_groups.pop() - parser_kernels_status_optional.add_argument('kernel', - nargs='?', - default=None, - help=Help.param_kernel) - parser_kernels_status_optional.add_argument('-k', - '--kernel', - dest='kernel_opt', - required=False, - help=argparse.SUPPRESS) + parser_kernels_status_optional.add_argument( + 'kernel', nargs='?', default=None, help=Help.param_kernel) + parser_kernels_status_optional.add_argument( + '-k', + '--kernel', + dest='kernel_opt', + required=False, + help=argparse.SUPPRESS) parser_kernels_status._action_groups.append(parser_kernels_status_optional) parser_kernels_status.set_defaults(func=api.kernels_status_cli) @@ -848,8 +915,8 @@ def parse_models(subparsers): help=Help.group_models, aliases=['m']) - subparsers_models = parser_models.add_subparsers(title='commands', - dest='command') + subparsers_models = parser_models.add_subparsers( + title='commands', dest='command') subparsers_models.required = True subparsers_models.choices = Help.models_choices @@ -863,11 +930,12 @@ def parse_models(subparsers): help=Help.command_models_get) parser_models_get_optional = parser_models_get._action_groups.pop() parser_models_get_optional.add_argument('model', help=Help.param_model) - parser_models_get_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_model_downfile) + parser_models_get_optional.add_argument( + '-p', + '--path', + dest='folder', + required=False, + help=Help.param_model_downfile) parser_models_get._action_groups.append(parser_models_get_optional) parser_models_get.set_defaults(func=api.model_get_cli) @@ -877,32 +945,32 @@ def parse_models(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_models_list) parser_models_list_optional = parser_models_list._action_groups.pop() - parser_models_list.add_argument('--sort-by', - dest='sort_by', - required=False, - help=Help.param_model_sort_by) - parser_models_list.add_argument('-s', - '--search', - dest='search', - required=False, - help=Help.param_search) - parser_models_list.add_argument('--owner', - dest='owner', - required=False, - help=Help.param_model_owner) - parser_models_list.add_argument('--page-size', - dest='page_size', - default=20, - help=Help.param_page_size) - 
parser_models_list.add_argument('--page-token', - dest='page_token', - required=False, - help=Help.param_page_token) - parser_models_list.add_argument('-v', - '--csv', - dest='csv_display', - action='store_true', - help=Help.param_csv) + parser_models_list.add_argument( + '--sort-by', + dest='sort_by', + required=False, + help=Help.param_model_sort_by) + parser_models_list.add_argument( + '-s', + '--search', + dest='search', + required=False, + help=Help.param_search) + parser_models_list.add_argument( + '--owner', dest='owner', required=False, help=Help.param_model_owner) + parser_models_list.add_argument( + '--page-size', dest='page_size', default=20, help=Help.param_page_size) + parser_models_list.add_argument( + '--page-token', + dest='page_token', + required=False, + help=Help.param_page_token) + parser_models_list.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) parser_models_list._action_groups.append(parser_models_list_optional) parser_models_list.set_defaults(func=api.model_list_cli) @@ -912,11 +980,12 @@ def parse_models(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_models_init) parser_models_init_optional = parser_models_init._action_groups.pop() - parser_models_init_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_model_upfile) + parser_models_init_optional.add_argument( + '-p', + '--path', + dest='folder', + required=False, + help=Help.param_model_upfile) parser_models_init._action_groups.append(parser_models_init_optional) parser_models_init.set_defaults(func=api.model_initialize_cli) @@ -926,11 +995,12 @@ def parse_models(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_models_new) parser_models_create_optional = parser_models_create._action_groups.pop() - parser_models_create_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_model_upfile) + parser_models_create_optional.add_argument( + '-p', + '--path', + dest='folder', + required=False, + help=Help.param_model_upfile) parser_models_create._action_groups.append(parser_models_create_optional) parser_models_create.set_defaults(func=api.model_create_new_cli) @@ -941,11 +1011,8 @@ def parse_models(subparsers): help=Help.command_models_delete) parser_models_delete_optional = parser_models_delete._action_groups.pop() parser_models_delete_optional.add_argument('model', help=Help.param_model) - parser_models_delete_optional.add_argument('-y', - '--yes', - dest='yes', - action='store_true', - help=Help.param_yes) + parser_models_delete_optional.add_argument( + '-y', '--yes', dest='yes', action='store_true', help=Help.param_yes) parser_models_delete._action_groups.append(parser_models_delete_optional) parser_models_delete.set_defaults(func=api.model_delete_cli) @@ -955,11 +1022,12 @@ def parse_models(subparsers): formatter_class=argparse.RawTextHelpFormatter, help=Help.command_models_update) parser_models_update_optional = parser_models_update._action_groups.pop() - parser_models_update_optional.add_argument('-p', - '--path', - dest='folder', - required=False, - help=Help.param_model_upfile) + parser_models_update_optional.add_argument( + '-p', + '--path', + dest='folder', + required=False, + help=Help.param_model_upfile) parser_models_update._action_groups.append(parser_models_update_optional) parser_models_update.set_defaults(func=api.model_update_cli) @@ -1029,11 +1097,12 @@ def parse_model_instances(subparsers): dest='folder', 
required=False, help=Help.param_model_instance_upfile) - parser_model_instances_create_optional.add_argument('-q', - '--quiet', - dest='quiet', - action='store_true', - help=Help.param_quiet) + parser_model_instances_create_optional.add_argument( + '-q', + '--quiet', + dest='quiet', + action='store_true', + help=Help.param_quiet) parser_model_instances_create_optional.add_argument( '-r', '--dir-mode', @@ -1046,6 +1115,33 @@ def parse_model_instances(subparsers): parser_model_instances_create.set_defaults( func=api.model_instance_create_cli) + # Model Instances files + parser_model_instances_files = subparsers_model_instances.add_parser( + 'files', + formatter_class=argparse.RawTextHelpFormatter, + help=Help.command_model_instances_files) + parser_model_instances_files_optional = parser_model_instances_files._action_groups.pop( + ) + parser_model_instances_files_optional.add_argument( + 'model_instance', help=Help.param_model_instance) + parser_model_instances_files_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_model_instances_files_optional.add_argument( + '--page-size', dest='page_size', default=20, help=Help.param_page_size) + parser_model_instances_files_optional.add_argument( + '--page-token', + dest='page_token', + required=False, + help=Help.param_page_token) + parser_model_instances_files._action_groups.append( + parser_model_instances_files_optional) + parser_model_instances_files.set_defaults( + func=api.model_instance_files_cli) + # Models Instances delete parser_model_instances_delete = subparsers_model_instances.add_parser( 'delete', @@ -1055,11 +1151,8 @@ def parse_model_instances(subparsers): ) parser_model_instances_delete_optional.add_argument( 'model_instance', help=Help.param_model_instance) - parser_model_instances_delete_optional.add_argument('-y', - '--yes', - dest='yes', - action='store_true', - help=Help.param_yes) + parser_model_instances_delete_optional.add_argument( + '-y', '--yes', dest='yes', action='store_true', help=Help.param_yes) parser_model_instances_delete._action_groups.append( parser_model_instances_delete_optional) parser_model_instances_delete.set_defaults( @@ -1169,6 +1262,33 @@ def parse_model_instance_versions(subparsers): parser_model_instance_versions_download.set_defaults( func=api.model_instance_version_download_cli) + # Models Instance Versions files + parser_model_instance_versions_files = subparsers_model_intance_versions.add_parser( + 'files', + formatter_class=argparse.RawTextHelpFormatter, + help=Help.command_model_instance_versions_files) + parser_model_instance_versions_files_optional = parser_model_instance_versions_files._action_groups.pop( + ) + parser_model_instance_versions_files_optional.add_argument( + 'model_instance_version', help=Help.param_model_instance_version) + parser_model_instance_versions_files_optional.add_argument( + '-v', + '--csv', + dest='csv_display', + action='store_true', + help=Help.param_csv) + parser_model_instance_versions_files_optional.add_argument( + '--page-size', dest='page_size', default=20, help=Help.param_page_size) + parser_model_instance_versions_files_optional.add_argument( + '--page-token', + dest='page_token', + required=False, + help=Help.param_page_token) + parser_model_instance_versions_files._action_groups.append( + parser_model_instance_versions_files_optional) + parser_model_instance_versions_files.set_defaults( + func=api.model_instance_version_files_cli) + # Models Instance Versions delete 
parser_model_instance_versions_delete = subparsers_model_intance_versions.add_parser( 'delete', @@ -1193,8 +1313,8 @@ def parse_files(subparsers): help=Help.group_files, aliases=['f']) - subparsers_files = parser_files.add_subparsers(title='commands', - dest='command') + subparsers_files = parser_files.add_subparsers( + title='commands', dest='command') subparsers_files.required = True subparsers_files.choices = Help.files_choices @@ -1240,8 +1360,8 @@ def parse_config(subparsers): 'config', formatter_class=argparse.RawTextHelpFormatter, help=Help.group_config) - subparsers_config = parser_config.add_subparsers(title='commands', - dest='command') + subparsers_config = parser_config.add_subparsers( + title='commands', dest='command') subparsers_config.required = True subparsers_config.choices = Help.config_choices @@ -1258,16 +1378,18 @@ def parse_config(subparsers): parser_config_set._action_groups.pop() parser_config_set_required = parser_config_set.add_argument_group( 'required arguments') - parser_config_set_required.add_argument('-n', - '--name', - dest='name', - required=True, - help=Help.param_config_name) - parser_config_set_required.add_argument('-v', - '--value', - dest='value', - required=True, - help=Help.param_config_value) + parser_config_set_required.add_argument( + '-n', + '--name', + dest='name', + required=True, + help=Help.param_config_name) + parser_config_set_required.add_argument( + '-v', + '--value', + dest='value', + required=True, + help=Help.param_config_value) parser_config_set.set_defaults(func=api.set_config_value) parser_config_unset = subparsers_config.add_parser( @@ -1277,11 +1399,12 @@ def parse_config(subparsers): parser_config_unset._action_groups.pop() parser_config_unset_required = parser_config_unset.add_argument_group( 'required arguments') - parser_config_unset_required.add_argument('-n', - '--name', - dest='name', - required=True, - help=Help.param_config_name) + parser_config_unset_required.add_argument( + '-n', + '--name', + dest='name', + required=True, + help=Help.param_config_name) parser_config_unset.set_defaults(func=api.unset_config_value) @@ -1297,14 +1420,18 @@ class Help(object): 'list', 'files', 'download', 'create', 'version', 'init', 'metadata', 'status' ] - kernels_choices = ['list', 'init', 'push', 'pull', 'output', 'status'] + kernels_choices = [ + 'list', 'files', 'init', 'push', 'pull', 'output', 'status' + ] models_choices = [ 'instances', 'get', 'list', 'init', 'create', 'delete', 'update' ] model_instances_choices = [ - 'versions', 'get', 'init', 'create', 'delete', 'update' + 'versions', 'get', 'files', 'init', 'create', 'delete', 'update' + ] + model_instance_versions_choices = [ + 'init', 'create', 'download', 'delete', 'files' ] - model_instance_versions_choices = ['init', 'create', 'download', 'delete'] files_choices = ['upload'] config_choices = ['view', 'set', 'unset'] @@ -1349,6 +1476,7 @@ class Help(object): command_kernels_list = ( 'List available kernels. 
By default, shows 20 results sorted by ' 'hotness') + command_kernels_files = 'List kernel output files' command_kernels_init = 'Initialize metadata file for a kernel' command_kernels_push = 'Push new code to a kernel and run the kernel' command_kernels_pull = 'Pull down code from a kernel' @@ -1356,6 +1484,7 @@ class Help(object): command_kernels_status = 'Display the status of the latest kernel run' # Models commands + command_models_files = 'List model files' command_models_get = 'Get a model' command_models_list = 'List models' command_models_init = 'Initialize metadata file for model creation' @@ -1397,9 +1526,10 @@ class Help(object): param_upfile = 'File for upload (full path)' param_csv = 'Print results in CSV format (if not set print in table format)' param_page = 'Page number for results paging. Page size is 20 by default' + # NOTE: Default and max page size are set by the mid-tier code. param_page_size = ( 'Number of items to show on a page. Default size is 20, ' - 'max is 100') + 'max is 200') param_page_token = 'Page token for results paging.' param_search = 'Term(s) to search for' param_mine = 'Display only my items' @@ -1537,6 +1667,7 @@ class Help(object): ) command_model_instances_get = 'Get a model instance' command_model_instances_init = 'Initialize metadata file for model instance creation' + command_model_instances_files = 'List files for the current version of a model instance' command_model_instances_new = 'Create a new model instance' param_model_instance_downfile = ( 'Folder for downloading the special model-instance-metadata.json file ' @@ -1563,6 +1694,7 @@ class Help(object): 'Defaults to current working directory') command_model_instance_versions_delete = 'Delete a model instance version' command_model_instance_versions_download = 'Download model instance version files' + command_model_instance_versions_files = 'List model instance version files' param_model_instance_version_notes = 'Version notes to record for the new model instance version' # Files params @@ -1581,3 +1713,7 @@ class Help(object): ('Value of the configuration parameter, valid values ' 'depending on name\n- competition: ') + param_competition_nonempty + '\n- path: ' + param_downfolder + '\n- proxy: ' + param_proxy) + + +if __name__ == '__main__': + main() diff --git a/src/kaggle/models/kaggle_models_extended.py b/src/kaggle/models/kaggle_models_extended.py index 79c9750..b745fd5 100644 --- a/src/kaggle/models/kaggle_models_extended.py +++ b/src/kaggle/models/kaggle_models_extended.py @@ -21,7 +21,6 @@ class Competition(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -32,7 +31,6 @@ def __repr__(self): class SubmitResult(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -42,7 +40,6 @@ def __repr__(self): class Submission(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -56,7 +53,6 @@ def __repr__(self): class LeaderboardEntry(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -66,7 +62,6 @@ def __repr__(self): class Dataset(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -80,7 +75,6 @@ def __repr__(self): class Model(object): - def __init__(self, init_dict): 
parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -90,7 +84,6 @@ def __repr__(self): class Metadata(object): - def __init__(self, init_info): parsed_info = {k: parse(v) for k, v in init_info.items()} # backwards compatibility @@ -103,7 +96,6 @@ def __repr__(self): class DatasetVersion(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -113,7 +105,6 @@ def __repr__(self): class File(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -133,7 +124,6 @@ def get_size(size, precision=0): class Tag(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -143,7 +133,6 @@ def __repr__(self): class DatasetNewVersionResponse(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -153,7 +142,6 @@ def __repr__(self): class DatasetNewResponse(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -163,7 +151,6 @@ def __repr__(self): class ListFilesResult(object): - def __init__(self, init_dict): self.error_message = init_dict['errorMessage'] files = init_dict['datasetFiles'] @@ -171,13 +158,17 @@ def __init__(self, init_dict): self.files = [File(f) for f in files] else: self.files = {} + token = init_dict['nextPageToken'] + if token: + self.nextPageToken = token + else: + self.nextPageToken = "" def __repr__(self): return self.error_message class Kernel: - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -187,7 +178,6 @@ def __repr__(self): class KernelPushResponse(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -197,7 +187,6 @@ def __repr__(self): class ModelNewResponse(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) @@ -207,7 +196,6 @@ def __repr__(self): class ModelDeleteResponse(object): - def __init__(self, init_dict): parsed_dict = {k: parse(v) for k, v in init_dict.items()} self.__dict__.update(parsed_dict) diff --git a/src/kaggle/test/test_authenticate.py b/src/kaggle/test/test_authenticate.py index 4aea645..e50f90e 100644 --- a/src/kaggle/test/test_authenticate.py +++ b/src/kaggle/test/test_authenticate.py @@ -7,7 +7,6 @@ class TestAuthenticate(unittest.TestCase): - def setUp(self): print("setup class:%s" % self) diff --git a/tests/test_commands.sh b/tests/test_commands.sh new file mode 100755 index 0000000..0c2493e --- /dev/null +++ b/tests/test_commands.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +# Verify the new options are plumbed through to the mid-tier (MT). +# The second two commands fail because permission is denied, but everything else works, although +# the new options are ignored. + +# Pagination for listing of competitions, datasets, and kernels is out of scope for the current work.
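+ +# The page tokens used below are dummy values; a real token comes from the +# nextPageToken field of a previous response.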
+ +kaggle competitions files titanic --page-size=3 --page-token=abcd +kaggle kernels files kerneler/sqlite-global-default --page-size=1 # valid page token required +kaggle datasets files kerneler/brazilian-bird-observation-metadata-from-wikiaves --page-size=7 --page-token=abcd +kaggle models instances versions files google/gemma/pytorch/7b/2 --page-size=3 --page-token=abcd
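For reference, here is a minimal sketch of how the paging plumbing added above might be consumed from Python. It assumes the public `dataset_list_files` method accepts `page_token` and `page_size` keyword arguments mirroring the new CLI flags (this diff only shows the CLI wiring, via `api.dataset_list_files_cli`); the `files` and `nextPageToken` attributes are the ones `ListFilesResult` gains in this change.

```python
# Hedged sketch: assumes dataset_list_files() accepts page_token/page_size
# keywords matching the new CLI flags; only ListFilesResult's files and
# nextPageToken attributes are confirmed by this diff.
from kaggle import api  # authenticates from ~/.kaggle/kaggle.json on import

page_token = None  # no token requests the first page
while True:
    result = api.dataset_list_files(
        'kerneler/brazilian-bird-observation-metadata-from-wikiaves',
        page_token=page_token,
        page_size=20)
    for f in result.files:
        print(f)  # File defines __repr__, so this prints a readable name
    page_token = result.nextPageToken  # empty string after the last page
    if not page_token:
        break
```

Storing an empty string rather than None in `ListFilesResult.nextPageToken` gives callers a single falsy sentinel for both "no token supplied" and "no further pages", which is what the loop above relies on.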