From c214eb1e4627ba769bd74e6149ef31cd0c4d40cb Mon Sep 17 00:00:00 2001
From: stevemessick
Date: Tue, 17 Dec 2024 10:42:31 -0800
Subject: [PATCH] Remove swagger (#659)

Reminders:
- `kagglesdk` is generated by `kapigen` -- no review needed
- The `kaggle` directory is a copy of `src/kaggle` -- no need to review both
---
 docs/README.md | 3 +
 kaggle/__init__.py | 13 +-
 kaggle/api/__init__.py | 22 -
 kaggle/api/kaggle_api.py | 4644 -----
 kaggle/api/kaggle_api_extended.py | 593 +--
 kaggle/api_client.py | 633 ---
 kaggle/cli.py | 19 +-
 kaggle/configuration.py | 444 +-
 kaggle/models/__init__.py | 52 -
 kaggle/models/api_blob_type.py | 111 +-
 kaggle/models/create_inbox_file_request.py | 160 -
 kaggle/models/dataset_column.py | 53 +-
 kaggle/models/dataset_new_request.py | 392 --
 kaggle/models/dataset_new_version_request.py | 302 --
 .../models/dataset_update_settings_request.py | 326 --
 kaggle/models/error.py | 158 -
 kaggle/models/kernel_push_request.py | 571 --
 kaggle/models/license.py | 137 -
 .../model_instance_new_version_request.py | 161 -
 .../models/model_instance_update_request.py | 367 --
 kaggle/models/model_new_instance_request.py | 433 --
 kaggle/models/model_new_request.py | 330 --
 kaggle/models/model_update_request.py | 298 --
 kaggle/models/result.py | 100 -
 kaggle/models/upload_file.py | 54 +-
 kaggle/rest.py | 336 --
 kagglesdk/admin/__init__.py | 0
 kagglesdk/admin/services/__init__.py | 0
 .../admin/services/inbox_file_service.py | 22 +
 kagglesdk/admin/types/__init__.py | 0
 kagglesdk/admin/types/inbox_file_service.py | 74 +
 kagglesdk/blobs/__init__.py | 0
 kagglesdk/blobs/services/__init__.py | 0
 kagglesdk/blobs/services/blob_api_service.py | 25 +
 kagglesdk/blobs/types/__init__.py | 0
 kagglesdk/blobs/types/blob_api_service.py | 173 +
 kagglesdk/datasets/types/dataset_types.py | 24 +-
 kagglesdk/education/__init__.py | 0
 kagglesdk/education/services/__init__.py | 0
 .../services/education_api_service.py | 19 +
 kagglesdk/education/types/__init__.py | 0
 .../education/types/education_api_service.py | 244 +
 .../education/types/education_service.py | 139 +
 kagglesdk/kaggle_client.py | 21 +
 kagglesdk/kaggle_object.py | 2 -
 .../kernels/services/kernels_api_service.py | 17 +-
 .../kernels/types/kernels_api_service.py | 105 +
 .../models/services/model_api_service.py | 49 +-
 kagglesdk/models/types/model_api_service.py | 394 ++
 kagglesdk/users/types/users_enums.py | 14 +-
 requirements.txt | 5 +
 src/KaggleSwagger.yaml | 2301 --------
 src/KaggleSwaggerConfig.json | 5 -
 src/kaggle/__init__.py | 3 +-
 src/kaggle/api/kaggle_api_extended.py | 219 +-
 src/kaggle/cli.py | 3 +-
 src/kaggle/configuration.py | 208 +
 src/kaggle/models/api_blob_type.py | 4 +
 src/kaggle/models/dataset_column.py | 227 +
 .../models/start_blob_upload_request.py | 244 +
 .../models/start_blob_upload_response.py | 82 +-
 src/kaggle/models/upload_file.py | 172 +
 tests/unit_tests.py | 48 +-
 tools/GeneratePythonLibrary.sh | 22 +-
 64 files changed, 2877 insertions(+), 12700 deletions(-)
 delete mode 100644 kaggle/api/__init__.py
 delete mode 100644 kaggle/api/kaggle_api.py
 delete mode 100644 kaggle/api_client.py
 delete mode 100644 kaggle/models/__init__.py
 delete mode 100644 kaggle/models/create_inbox_file_request.py
 delete mode 100644 kaggle/models/dataset_new_request.py
 delete mode 100644 kaggle/models/dataset_new_version_request.py
 delete mode 100644 kaggle/models/dataset_update_settings_request.py
 delete mode 100644 kaggle/models/error.py
 delete mode 100644 kaggle/models/kernel_push_request.py
 delete mode 100644 kaggle/models/license.py
 delete mode 100644 kaggle/models/model_instance_new_version_request.py
 delete mode 100644 kaggle/models/model_instance_update_request.py
 delete mode 100644 kaggle/models/model_new_instance_request.py
 delete mode 100644 kaggle/models/model_new_request.py
 delete mode 100644 kaggle/models/model_update_request.py
 delete mode 100644 kaggle/models/result.py
 delete mode 100644 kaggle/rest.py
 create mode 100644 kagglesdk/admin/__init__.py
 create mode 100644 kagglesdk/admin/services/__init__.py
 create mode 100644 kagglesdk/admin/services/inbox_file_service.py
 create mode 100644 kagglesdk/admin/types/__init__.py
 create mode 100644 kagglesdk/admin/types/inbox_file_service.py
 create mode 100644 kagglesdk/blobs/__init__.py
 create mode 100644 kagglesdk/blobs/services/__init__.py
 create mode 100644 kagglesdk/blobs/services/blob_api_service.py
 create mode 100644 kagglesdk/blobs/types/__init__.py
 create mode 100644 kagglesdk/blobs/types/blob_api_service.py
 create mode 100644 kagglesdk/education/__init__.py
 create mode 100644 kagglesdk/education/services/__init__.py
 create mode 100644 kagglesdk/education/services/education_api_service.py
 create mode 100644 kagglesdk/education/types/__init__.py
 create mode 100644 kagglesdk/education/types/education_api_service.py
 create mode 100644 kagglesdk/education/types/education_service.py
 create mode 100644 requirements.txt
 delete mode 100644 src/KaggleSwagger.yaml
 delete mode 100644 src/KaggleSwaggerConfig.json
 create mode 100644 src/kaggle/configuration.py
 create mode 100644 src/kaggle/models/api_blob_type.py
 create mode 100644 src/kaggle/models/dataset_column.py
 create mode 100644 src/kaggle/models/start_blob_upload_request.py
 rename kaggle/models/collaborator.py => src/kaggle/models/start_blob_upload_response.py (58%)
 create mode 100644 src/kaggle/models/upload_file.py

diff --git a/docs/README.md b/docs/README.md
index b7c0957..cff06f5 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -1,5 +1,8 @@
 # Kaggle API
 
+CAUTION: Most of the files in this directory are obsolete and will be deleted. This file is valid,
+as are `model_card.md`, `modelinstance_usage.md`, and `models_metadata.md`.
+
 Official API for https://www.kaggle.com, accessible using a command line tool implemented in Python 3.
 
 Beta release - Kaggle reserves the right to modify the API functionality currently offered.
diff --git a/kaggle/__init__.py b/kaggle/__init__.py
index 4d2f819..7e342e5 100644
--- a/kaggle/__init__.py
+++ b/kaggle/__init__.py
@@ -14,10 +14,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-# coding=utf-8
-from __future__ import absolute_import
-from kaggle.api.kaggle_api_extended import KaggleApi
-from kaggle.api_client import ApiClient
-
-api = KaggleApi(ApiClient())
-api.authenticate()
+# coding=utf-8
+from __future__ import absolute_import
+from kaggle.api.kaggle_api_extended import KaggleApi
+
+api = KaggleApi()
+api.authenticate()
diff --git a/kaggle/api/__init__.py b/kaggle/api/__init__.py
deleted file mode 100644
index bd47882..0000000
--- a/kaggle/api/__init__.py
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/python
-#
-# Copyright 2024 Kaggle Inc
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import - -# flake8: noqa - -# import apis into api package -from kaggle.api.kaggle_api_extended import KaggleApi diff --git a/kaggle/api/kaggle_api.py b/kaggle/api/kaggle_api.py deleted file mode 100644 index 28a0ba3..0000000 --- a/kaggle/api/kaggle_api.py +++ /dev/null @@ -1,4644 +0,0 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -""" - Kaggle API - - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - - -from __future__ import absolute_import - -import re # noqa: F401 - -# python 2 and python 3 compatibility library -import six - -from kaggle.api_client import ApiClient - - -class KaggleApi(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - Ref: https://github.com/swagger-api/swagger-codegen - """ - - def __init__(self, api_client=None): - if api_client is None: - api_client = ApiClient() - self.api_client = api_client - - def competition_download_leaderboard(self, id, **kwargs): # noqa: E501 - """Download competition leaderboard # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.competition_download_leaderboard(id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Competition name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.competition_download_leaderboard_with_http_info(id, **kwargs) # noqa: E501 - else: - (data) = self.competition_download_leaderboard_with_http_info(id, **kwargs) # noqa: E501 - return data - - def competition_download_leaderboard_with_http_info(self, id, **kwargs): # noqa: E501 - """Download competition leaderboard # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.competition_download_leaderboard_with_http_info(id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Competition name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method competition_download_leaderboard" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `competition_download_leaderboard`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/competitions/{id}/leaderboard/download', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def competition_view_leaderboard(self, id, **kwargs): # noqa: E501 - """VIew competition leaderboard # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.competition_view_leaderboard(id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Competition name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.competition_view_leaderboard_with_http_info(id, **kwargs) # noqa: E501 - else: - (data) = self.competition_view_leaderboard_with_http_info(id, **kwargs) # noqa: E501 - return data - - def competition_view_leaderboard_with_http_info(self, id, **kwargs): # noqa: E501 - """VIew competition leaderboard # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.competition_view_leaderboard_with_http_info(id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Competition name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method competition_view_leaderboard" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `competition_view_leaderboard`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/competitions/{id}/leaderboard/view', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def competitions_data_download_file(self, id, file_name, **kwargs): # noqa: E501 - """Download competition data file # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.competitions_data_download_file(id, file_name, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Competition name (required) - :param str file_name: Competition name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.competitions_data_download_file_with_http_info(id, file_name, **kwargs) # noqa: E501 - else: - (data) = self.competitions_data_download_file_with_http_info(id, file_name, **kwargs) # noqa: E501 - return data - - def competitions_data_download_file_with_http_info(self, id, file_name, **kwargs): # noqa: E501 - """Download competition data file # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.competitions_data_download_file_with_http_info(id, file_name, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Competition name (required) - :param str file_name: Competition name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['id', 'file_name'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method competitions_data_download_file" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `competitions_data_download_file`") # noqa: E501 - # verify the required parameter 'file_name' is set - if ('file_name' not in params or - params['file_name'] is None): - raise ValueError("Missing the required parameter `file_name` when calling `competitions_data_download_file`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 - if 'file_name' in params: - path_params['fileName'] = params['file_name'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/competitions/data/download/{id}/{fileName}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def competitions_data_download_files(self, id, **kwargs): # noqa: E501 - """Download all competition data files # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.competitions_data_download_files(id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Competition name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.competitions_data_download_files_with_http_info(id, **kwargs) # noqa: E501 - else: - (data) = self.competitions_data_download_files_with_http_info(id, **kwargs) # noqa: E501 - return data - - def competitions_data_download_files_with_http_info(self, id, **kwargs): # noqa: E501 - """Download all competition data files # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.competitions_data_download_files_with_http_info(id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Competition name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method competitions_data_download_files" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `competitions_data_download_files`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/competitions/data/download-all/{id}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def competitions_data_list_files(self, id, **kwargs): # noqa: E501 - """List competition data files # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.competitions_data_list_files(id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Competition name (required) - :param str page_token: Page token for pagination - :param int page_size: Number of items per page (default 20) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.competitions_data_list_files_with_http_info(id, **kwargs) # noqa: E501 - else: - (data) = self.competitions_data_list_files_with_http_info(id, **kwargs) # noqa: E501 - return data - - def competitions_data_list_files_with_http_info(self, id, **kwargs): # noqa: E501 - """List competition data files # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.competitions_data_list_files_with_http_info(id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Competition name (required) - :param str page_token: Page token for pagination - :param int page_size: Number of items per page (default 20) - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['id', 'page_token', 'page_size'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method competitions_data_list_files" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `competitions_data_list_files`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 - - query_params = [] - if 'page_token' in params: - query_params.append(('pageToken', params['page_token'])) # noqa: E501 - if 'page_size' in params: - query_params.append(('pageSize', params['page_size'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/competitions/data/list/{id}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def competitions_list(self, **kwargs): # noqa: E501 - """List competitions # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.competitions_list(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str group: Filter competitions by a particular group - :param str category: Filter competitions by a particular category - :param str sort_by: Sort the results - :param int page: Page number - :param str search: Search terms - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.competitions_list_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.competitions_list_with_http_info(**kwargs) # noqa: E501 - return data - - def competitions_list_with_http_info(self, **kwargs): # noqa: E501 - """List competitions # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.competitions_list_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str group: Filter competitions by a particular group - :param str category: Filter competitions by a particular category - :param str sort_by: Sort the results - :param int page: Page number - :param str search: Search terms - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['group', 'category', 'sort_by', 'page', 'search'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method competitions_list" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'group' in params: - query_params.append(('group', params['group'])) # noqa: E501 - if 'category' in params: - query_params.append(('category', params['category'])) # noqa: E501 - if 'sort_by' in params: - query_params.append(('sortBy', params['sort_by'])) # noqa: E501 - if 'page' in params: - query_params.append(('page', params['page'])) # noqa: E501 - if 'search' in params: - query_params.append(('search', params['search'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/competitions/list', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def competitions_submissions_list(self, id, **kwargs): # noqa: E501 - """List competition submissions # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.competitions_submissions_list(id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Competition name (required) - :param int page: Page number - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.competitions_submissions_list_with_http_info(id, **kwargs) # noqa: E501 - else: - (data) = self.competitions_submissions_list_with_http_info(id, **kwargs) # noqa: E501 - return data - - def competitions_submissions_list_with_http_info(self, id, **kwargs): # noqa: E501 - """List competition submissions # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.competitions_submissions_list_with_http_info(id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Competition name (required) - :param int page: Page number - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['id', 'page'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method competitions_submissions_list" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `competitions_submissions_list`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 - - query_params = [] - if 'page' in params: - query_params.append(('page', params['page'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/competitions/submissions/list/{id}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def competitions_submissions_submit(self, blob_file_tokens, submission_description, id, **kwargs): # noqa: E501 - """Submit to competition # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.competitions_submissions_submit(blob_file_tokens, submission_description, id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str blob_file_tokens: Token identifying location of uploaded submission file (required) - :param str submission_description: Description of competition submission (required) - :param str id: Competition name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.competitions_submissions_submit_with_http_info(blob_file_tokens, submission_description, id, **kwargs) # noqa: E501 - else: - (data) = self.competitions_submissions_submit_with_http_info(blob_file_tokens, submission_description, id, **kwargs) # noqa: E501 - return data - - def competitions_submissions_submit_with_http_info(self, blob_file_tokens, submission_description, id, **kwargs): # noqa: E501 - """Submit to competition # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.competitions_submissions_submit_with_http_info(blob_file_tokens, submission_description, id, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str blob_file_tokens: Token identifying location of uploaded submission file (required) - :param str submission_description: Description of competition submission (required) - :param str id: Competition name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['blob_file_tokens', 'submission_description', 'id'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method competitions_submissions_submit" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'blob_file_tokens' is set - if ('blob_file_tokens' not in params or - params['blob_file_tokens'] is None): - raise ValueError("Missing the required parameter `blob_file_tokens` when calling `competitions_submissions_submit`") # noqa: E501 - # verify the required parameter 'submission_description' is set - if ('submission_description' not in params or - params['submission_description'] is None): - raise ValueError("Missing the required parameter `submission_description` when calling `competitions_submissions_submit`") # noqa: E501 - # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `competitions_submissions_submit`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - if 'blob_file_tokens' in params: - form_params.append(('blobFileTokens', params['blob_file_tokens'])) # noqa: E501 - if 'submission_description' in params: - form_params.append(('submissionDescription', params['submission_description'])) # noqa: E501 - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/competitions/submissions/submit/{id}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def competitions_submissions_upload(self, file, guid, content_length, last_modified_date_utc, **kwargs): # noqa: E501 - """Upload competition submission file # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.competitions_submissions_upload(file, guid, content_length, last_modified_date_utc, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param file file: Competition submission file (required) - :param str guid: Location where submission should be uploaded (required) - :param int content_length: Content length of file in bytes (required) - :param int last_modified_date_utc: Last modified date of file in seconds since epoch in UTC (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.competitions_submissions_upload_with_http_info(file, guid, content_length, last_modified_date_utc, **kwargs) # noqa: E501 - else: - (data) = self.competitions_submissions_upload_with_http_info(file, guid, content_length, last_modified_date_utc, **kwargs) # noqa: E501 - return data - - def competitions_submissions_upload_with_http_info(self, file, guid, content_length, last_modified_date_utc, **kwargs): # noqa: E501 - """Upload competition submission file # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.competitions_submissions_upload_with_http_info(file, guid, content_length, last_modified_date_utc, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param file file: Competition submission file (required) - :param str guid: Location where submission should be uploaded (required) - :param int content_length: Content length of file in bytes (required) - :param int last_modified_date_utc: Last modified date of file in seconds since epoch in UTC (required) - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['file', 'guid', 'content_length', 'last_modified_date_utc'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method competitions_submissions_upload" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'file' is set - if ('file' not in params or - params['file'] is None): - raise ValueError("Missing the required parameter `file` when calling `competitions_submissions_upload`") # noqa: E501 - # verify the required parameter 'guid' is set - if ('guid' not in params or - params['guid'] is None): - raise ValueError("Missing the required parameter `guid` when calling `competitions_submissions_upload`") # noqa: E501 - # verify the required parameter 'content_length' is set - if ('content_length' not in params or - params['content_length'] is None): - raise ValueError("Missing the required parameter `content_length` when calling `competitions_submissions_upload`") # noqa: E501 - # verify the required parameter 'last_modified_date_utc' is set - if ('last_modified_date_utc' not in params or - params['last_modified_date_utc'] is None): - raise ValueError("Missing the required parameter `last_modified_date_utc` when calling `competitions_submissions_upload`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'guid' in params: - path_params['guid'] = params['guid'] # noqa: E501 - if 'content_length' in params: - path_params['contentLength'] = params['content_length'] # noqa: E501 - if 'last_modified_date_utc' in params: - path_params['lastModifiedDateUtc'] = params['last_modified_date_utc'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - if 'file' in params: - local_var_files['file'] = params['file'] # noqa: E501 - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/competitions/submissions/upload/{guid}/{contentLength}/{lastModifiedDateUtc}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def competitions_submissions_url(self, id, content_length, last_modified_date_utc, **kwargs): # noqa: E501 - """Generate competition submission URL # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.competitions_submissions_url(id, content_length, last_modified_date_utc, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Competition name, as it appears in the competition's URL (required) - :param int content_length: Content length of file in bytes (required) - :param int last_modified_date_utc: Last modified date of file in seconds since epoch in UTC (required) - :param str file_name: Competition submission file name - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.competitions_submissions_url_with_http_info(id, content_length, last_modified_date_utc, **kwargs) # noqa: E501 - else: - (data) = self.competitions_submissions_url_with_http_info(id, content_length, last_modified_date_utc, **kwargs) # noqa: E501 - return data - - def competitions_submissions_url_with_http_info(self, id, content_length, last_modified_date_utc, **kwargs): # noqa: E501 - """Generate competition submission URL # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.competitions_submissions_url_with_http_info(id, content_length, last_modified_date_utc, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str id: Competition name, as it appears in the competition's URL (required) - :param int content_length: Content length of file in bytes (required) - :param int last_modified_date_utc: Last modified date of file in seconds since epoch in UTC (required) - :param str file_name: Competition submission file name - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['id', 'content_length', 'last_modified_date_utc', 'file_name'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method competitions_submissions_url" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `competitions_submissions_url`") # noqa: E501 - # verify the required parameter 'content_length' is set - if ('content_length' not in params or - params['content_length'] is None): - raise ValueError("Missing the required parameter `content_length` when calling `competitions_submissions_url`") # noqa: E501 - # verify the required parameter 'last_modified_date_utc' is set - if ('last_modified_date_utc' not in params or - params['last_modified_date_utc'] is None): - raise ValueError("Missing the required parameter `last_modified_date_utc` when calling `competitions_submissions_url`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 - if 'content_length' in params: - path_params['contentLength'] = params['content_length'] # noqa: E501 - if 'last_modified_date_utc' in params: - path_params['lastModifiedDateUtc'] = params['last_modified_date_utc'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - if 'file_name' in params: - form_params.append(('fileName', params['file_name'])) # noqa: E501 - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['multipart/form-data']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/competitions/{id}/submissions/url/{contentLength}/{lastModifiedDateUtc}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def create_inbox_file(self, create_inbox_file_request, **kwargs): # noqa: E501 - """Creates (aka \"drops\") a new file into the inbox. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_inbox_file(create_inbox_file_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param CreateInboxFileRequest create_inbox_file_request: (required) - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.create_inbox_file_with_http_info(create_inbox_file_request, **kwargs) # noqa: E501 - else: - (data) = self.create_inbox_file_with_http_info(create_inbox_file_request, **kwargs) # noqa: E501 - return data - - def create_inbox_file_with_http_info(self, create_inbox_file_request, **kwargs): # noqa: E501 - """Creates (aka \"drops\") a new file into the inbox. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.create_inbox_file_with_http_info(create_inbox_file_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param CreateInboxFileRequest create_inbox_file_request: (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['create_inbox_file_request'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method create_inbox_file" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'create_inbox_file_request' is set - if ('create_inbox_file_request' not in params or - params['create_inbox_file_request'] is None): - raise ValueError("Missing the required parameter `create_inbox_file_request` when calling `create_inbox_file`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'create_inbox_file_request' in params: - body_params = params['create_inbox_file_request'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/inbox/files/create', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def datasets_create_new(self, dataset_new_request, **kwargs): # noqa: E501 - """Create a new dataset # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.datasets_create_new(dataset_new_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param DatasetNewRequest dataset_new_request: Information for creating a new dataset (required) - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.datasets_create_new_with_http_info(dataset_new_request, **kwargs) # noqa: E501 - else: - (data) = self.datasets_create_new_with_http_info(dataset_new_request, **kwargs) # noqa: E501 - return data - - def datasets_create_new_with_http_info(self, dataset_new_request, **kwargs): # noqa: E501 - """Create a new dataset # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.datasets_create_new_with_http_info(dataset_new_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param DatasetNewRequest dataset_new_request: Information for creating a new dataset (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['dataset_new_request'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method datasets_create_new" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'dataset_new_request' is set - if ('dataset_new_request' not in params or - params['dataset_new_request'] is None): - raise ValueError("Missing the required parameter `dataset_new_request` when calling `datasets_create_new`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'dataset_new_request' in params: - body_params = params['dataset_new_request'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/datasets/create/new', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def datasets_create_version(self, owner_slug, dataset_slug, dataset_new_version_request, **kwargs): # noqa: E501 - """Create a new dataset version # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.datasets_create_version(owner_slug, dataset_slug, dataset_new_version_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Dataset owner (required) - :param str dataset_slug: Dataset name (required) - :param DatasetNewVersionRequest dataset_new_version_request: Information for creating a new dataset version (required) - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.datasets_create_version_with_http_info(owner_slug, dataset_slug, dataset_new_version_request, **kwargs) # noqa: E501 - else: - (data) = self.datasets_create_version_with_http_info(owner_slug, dataset_slug, dataset_new_version_request, **kwargs) # noqa: E501 - return data - - def datasets_create_version_with_http_info(self, owner_slug, dataset_slug, dataset_new_version_request, **kwargs): # noqa: E501 - """Create a new dataset version # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.datasets_create_version_with_http_info(owner_slug, dataset_slug, dataset_new_version_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Dataset owner (required) - :param str dataset_slug: Dataset name (required) - :param DatasetNewVersionRequest dataset_new_version_request: Information for creating a new dataset version (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['owner_slug', 'dataset_slug', 'dataset_new_version_request'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method datasets_create_version" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'owner_slug' is set - if ('owner_slug' not in params or - params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `datasets_create_version`") # noqa: E501 - # verify the required parameter 'dataset_slug' is set - if ('dataset_slug' not in params or - params['dataset_slug'] is None): - raise ValueError("Missing the required parameter `dataset_slug` when calling `datasets_create_version`") # noqa: E501 - # verify the required parameter 'dataset_new_version_request' is set - if ('dataset_new_version_request' not in params or - params['dataset_new_version_request'] is None): - raise ValueError("Missing the required parameter `dataset_new_version_request` when calling `datasets_create_version`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'owner_slug' in params: - path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 - if 'dataset_slug' in params: - path_params['datasetSlug'] = params['dataset_slug'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'dataset_new_version_request' in params: - body_params = params['dataset_new_version_request'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/datasets/create/version/{ownerSlug}/{datasetSlug}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - 
auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def datasets_create_version_by_id(self, id, dataset_new_version_request, **kwargs): # noqa: E501 - """Create a new dataset version by id # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.datasets_create_version_by_id(id, dataset_new_version_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param int id: Dataset ID (required) - :param DatasetNewVersionRequest dataset_new_version_request: Information for creating a new dataset version (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.datasets_create_version_by_id_with_http_info(id, dataset_new_version_request, **kwargs) # noqa: E501 - else: - (data) = self.datasets_create_version_by_id_with_http_info(id, dataset_new_version_request, **kwargs) # noqa: E501 - return data - - def datasets_create_version_by_id_with_http_info(self, id, dataset_new_version_request, **kwargs): # noqa: E501 - """Create a new dataset version by id # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.datasets_create_version_by_id_with_http_info(id, dataset_new_version_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param int id: Dataset ID (required) - :param DatasetNewVersionRequest dataset_new_version_request: Information for creating a new dataset version (required) - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['id', 'dataset_new_version_request'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method datasets_create_version_by_id" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'id' is set - if ('id' not in params or - params['id'] is None): - raise ValueError("Missing the required parameter `id` when calling `datasets_create_version_by_id`") # noqa: E501 - # verify the required parameter 'dataset_new_version_request' is set - if ('dataset_new_version_request' not in params or - params['dataset_new_version_request'] is None): - raise ValueError("Missing the required parameter `dataset_new_version_request` when calling `datasets_create_version_by_id`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'id' in params: - path_params['id'] = params['id'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'dataset_new_version_request' in params: - body_params = params['dataset_new_version_request'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/datasets/create/version/{id}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def datasets_download(self, owner_slug, dataset_slug, **kwargs): # noqa: E501 - """Download dataset file # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.datasets_download(owner_slug, dataset_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Dataset owner (required) - :param str dataset_slug: Dataset name (required) - :param str dataset_version_number: Dataset version number - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.datasets_download_with_http_info(owner_slug, dataset_slug, **kwargs) # noqa: E501 - else: - (data) = self.datasets_download_with_http_info(owner_slug, dataset_slug, **kwargs) # noqa: E501 - return data - - def datasets_download_with_http_info(self, owner_slug, dataset_slug, **kwargs): # noqa: E501 - """Download dataset file # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.datasets_download_with_http_info(owner_slug, dataset_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Dataset owner (required) - :param str dataset_slug: Dataset name (required) - :param str dataset_version_number: Dataset version number - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['owner_slug', 'dataset_slug', 'dataset_version_number'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method datasets_download" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'owner_slug' is set - if ('owner_slug' not in params or - params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `datasets_download`") # noqa: E501 - # verify the required parameter 'dataset_slug' is set - if ('dataset_slug' not in params or - params['dataset_slug'] is None): - raise ValueError("Missing the required parameter `dataset_slug` when calling `datasets_download`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'owner_slug' in params: - path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 - if 'dataset_slug' in params: - path_params['datasetSlug'] = params['dataset_slug'] # noqa: E501 - - query_params = [] - if 'dataset_version_number' in params: - query_params.append(('datasetVersionNumber', params['dataset_version_number'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['file']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/datasets/download/{ownerSlug}/{datasetSlug}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def datasets_download_file(self, owner_slug, dataset_slug, file_name, **kwargs): # noqa: E501 - """Download dataset file # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.datasets_download_file(owner_slug, dataset_slug, file_name, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Dataset owner (required) - :param str dataset_slug: Dataset name (required) - :param str file_name: File name (required) - :param str dataset_version_number: Dataset version number - :return: Result - If the method is called asynchronously, - returns the request thread. 
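The download endpoints above are plain authenticated GETs that stream a file; a minimal sketch with requests (the base URL and the owner/dataset slugs are placeholders):

import os
import requests

BASE = "https://www.kaggle.com/api/v1"  # assumed base path; not part of this diff
auth = (os.environ["KAGGLE_USERNAME"], os.environ["KAGGLE_KEY"])
# GET /datasets/download/{ownerSlug}/{datasetSlug}, optionally pinned to a version
with requests.get(
    f"{BASE}/datasets/download/some-owner/some-dataset",
    auth=auth,
    params={"datasetVersionNumber": 1},
    stream=True,
) as resp:
    resp.raise_for_status()
    with open("some-dataset.zip", "wb") as f:
        for chunk in resp.iter_content(chunk_size=1 << 20):
            f.write(chunk)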
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.datasets_download_file_with_http_info(owner_slug, dataset_slug, file_name, **kwargs) # noqa: E501 - else: - (data) = self.datasets_download_file_with_http_info(owner_slug, dataset_slug, file_name, **kwargs) # noqa: E501 - return data - - def datasets_download_file_with_http_info(self, owner_slug, dataset_slug, file_name, **kwargs): # noqa: E501 - """Download dataset file # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.datasets_download_file_with_http_info(owner_slug, dataset_slug, file_name, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Dataset owner (required) - :param str dataset_slug: Dataset name (required) - :param str file_name: File name (required) - :param str dataset_version_number: Dataset version number - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['owner_slug', 'dataset_slug', 'file_name', 'dataset_version_number'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method datasets_download_file" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'owner_slug' is set - if ('owner_slug' not in params or - params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `datasets_download_file`") # noqa: E501 - # verify the required parameter 'dataset_slug' is set - if ('dataset_slug' not in params or - params['dataset_slug'] is None): - raise ValueError("Missing the required parameter `dataset_slug` when calling `datasets_download_file`") # noqa: E501 - # verify the required parameter 'file_name' is set - if ('file_name' not in params or - params['file_name'] is None): - raise ValueError("Missing the required parameter `file_name` when calling `datasets_download_file`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'owner_slug' in params: - path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 - if 'dataset_slug' in params: - path_params['datasetSlug'] = params['dataset_slug'] # noqa: E501 - if 'file_name' in params: - path_params['fileName'] = params['file_name'] # noqa: E501 - - query_params = [] - if 'dataset_version_number' in params: - query_params.append(('datasetVersionNumber', params['dataset_version_number'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['file']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/datasets/download/{ownerSlug}/{datasetSlug}/{fileName}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - 
_request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def datasets_list(self, **kwargs): # noqa: E501 - """List datasets # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.datasets_list(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str group: Display datasets by a particular group - :param str sort_by: Sort the results - :param str size: (DEPRECATED). Please use --max-size and --min-size to filter dataset sizes. - :param str filetype: Display datasets of a specific file type - :param str license: Display datasets with a specific license - :param str tagids: A comma separated list of tags to filter by - :param str search: Search terms - :param str user: Display datasets by a specific user or organization - :param int page: Page number - :param int max_size: Max Dataset Size (bytes) - :param int min_size: Max Dataset Size (bytes) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.datasets_list_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.datasets_list_with_http_info(**kwargs) # noqa: E501 - return data - - def datasets_list_with_http_info(self, **kwargs): # noqa: E501 - """List datasets # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.datasets_list_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str group: Display datasets by a particular group - :param str sort_by: Sort the results - :param str size: (DEPRECATED). Please use --max-size and --min-size to filter dataset sizes. - :param str filetype: Display datasets of a specific file type - :param str license: Display datasets with a specific license - :param str tagids: A comma separated list of tags to filter by - :param str search: Search terms - :param str user: Display datasets by a specific user or organization - :param int page: Page number - :param int max_size: Max Dataset Size (bytes) - :param int min_size: Max Dataset Size (bytes) - :return: Result - If the method is called asynchronously, - returns the request thread. 
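The list endpoint above passes every filter as a query parameter; a minimal sketch of an equivalent call (the base URL and filter values are placeholders, and the exact shape of the JSON response is not shown in this diff):

import os
import requests

auth = (os.environ["KAGGLE_USERNAME"], os.environ["KAGGLE_KEY"])
# GET /datasets/list with a few of the filters documented above
resp = requests.get(
    "https://www.kaggle.com/api/v1/datasets/list",  # assumed base path
    auth=auth,
    params={"search": "titanic", "sortBy": "hottest", "page": 1},
)
resp.raise_for_status()
print(resp.json())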
- """ - - all_params = ['group', 'sort_by', 'size', 'filetype', 'license', 'tagids', 'search', 'user', 'page', 'max_size', 'min_size'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method datasets_list" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'group' in params: - query_params.append(('group', params['group'])) # noqa: E501 - if 'sort_by' in params: - query_params.append(('sortBy', params['sort_by'])) # noqa: E501 - if 'size' in params: - query_params.append(('size', params['size'])) # noqa: E501 - if 'filetype' in params: - query_params.append(('filetype', params['filetype'])) # noqa: E501 - if 'license' in params: - query_params.append(('license', params['license'])) # noqa: E501 - if 'tagids' in params: - query_params.append(('tagids', params['tagids'])) # noqa: E501 - if 'search' in params: - query_params.append(('search', params['search'])) # noqa: E501 - if 'user' in params: - query_params.append(('user', params['user'])) # noqa: E501 - if 'page' in params: - query_params.append(('page', params['page'])) # noqa: E501 - if 'max_size' in params: - query_params.append(('maxSize', params['max_size'])) # noqa: E501 - if 'min_size' in params: - query_params.append(('minSize', params['min_size'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/datasets/list', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def datasets_list_files(self, owner_slug, dataset_slug, **kwargs): # noqa: E501 - """List dataset files # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.datasets_list_files(owner_slug, dataset_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Dataset owner (required) - :param str dataset_slug: Dataset name (required) - :param str dataset_version_number: Dataset version number - :param str page_token: Page token for pagination - :param int page_size: Number of items per page (default 20) - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.datasets_list_files_with_http_info(owner_slug, dataset_slug, **kwargs) # noqa: E501 - else: - (data) = self.datasets_list_files_with_http_info(owner_slug, dataset_slug, **kwargs) # noqa: E501 - return data - - def datasets_list_files_with_http_info(self, owner_slug, dataset_slug, **kwargs): # noqa: E501 - """List dataset files # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.datasets_list_files_with_http_info(owner_slug, dataset_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Dataset owner (required) - :param str dataset_slug: Dataset name (required) - :param str dataset_version_number: Dataset version number - :param str page_token: Page token for pagination - :param int page_size: Number of items per page (default 20) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['owner_slug', 'dataset_slug', 'dataset_version_number', 'page_token', 'page_size'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method datasets_list_files" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'owner_slug' is set - if ('owner_slug' not in params or - params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `datasets_list_files`") # noqa: E501 - # verify the required parameter 'dataset_slug' is set - if ('dataset_slug' not in params or - params['dataset_slug'] is None): - raise ValueError("Missing the required parameter `dataset_slug` when calling `datasets_list_files`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'owner_slug' in params: - path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 - if 'dataset_slug' in params: - path_params['datasetSlug'] = params['dataset_slug'] # noqa: E501 - - query_params = [] - if 'dataset_version_number' in params: - query_params.append(('datasetVersionNumber', params['dataset_version_number'])) # noqa: E501 - if 'page_token' in params: - query_params.append(('pageToken', params['page_token'])) # noqa: E501 - if 'page_size' in params: - query_params.append(('pageSize', params['page_size'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/datasets/list/{ownerSlug}/{datasetSlug}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def datasets_status(self, owner_slug, 
dataset_slug, **kwargs): # noqa: E501 - """Get dataset creation status # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.datasets_status(owner_slug, dataset_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Dataset owner (required) - :param str dataset_slug: Dataset name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.datasets_status_with_http_info(owner_slug, dataset_slug, **kwargs) # noqa: E501 - else: - (data) = self.datasets_status_with_http_info(owner_slug, dataset_slug, **kwargs) # noqa: E501 - return data - - def datasets_status_with_http_info(self, owner_slug, dataset_slug, **kwargs): # noqa: E501 - """Get dataset creation status # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.datasets_status_with_http_info(owner_slug, dataset_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Dataset owner (required) - :param str dataset_slug: Dataset name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['owner_slug', 'dataset_slug'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method datasets_status" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'owner_slug' is set - if ('owner_slug' not in params or - params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `datasets_status`") # noqa: E501 - # verify the required parameter 'dataset_slug' is set - if ('dataset_slug' not in params or - params['dataset_slug'] is None): - raise ValueError("Missing the required parameter `dataset_slug` when calling `datasets_status`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'owner_slug' in params: - path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 - if 'dataset_slug' in params: - path_params['datasetSlug'] = params['dataset_slug'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/datasets/status/{ownerSlug}/{datasetSlug}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def delete_model(self, owner_slug, model_slug, **kwargs): # noqa: E501 - """Delete a 
model # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_model(owner_slug, model_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_model_with_http_info(owner_slug, model_slug, **kwargs) # noqa: E501 - else: - (data) = self.delete_model_with_http_info(owner_slug, model_slug, **kwargs) # noqa: E501 - return data - - def delete_model_with_http_info(self, owner_slug, model_slug, **kwargs): # noqa: E501 - """Delete a model # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_model_with_http_info(owner_slug, model_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['owner_slug', 'model_slug'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method delete_model" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'owner_slug' is set - if ('owner_slug' not in params or - params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `delete_model`") # noqa: E501 - # verify the required parameter 'model_slug' is set - if ('model_slug' not in params or - params['model_slug'] is None): - raise ValueError("Missing the required parameter `model_slug` when calling `delete_model`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'owner_slug' in params: - path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 - if 'model_slug' in params: - path_params['modelSlug'] = params['model_slug'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/models/{ownerSlug}/{modelSlug}/delete', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def delete_model_instance(self, owner_slug, model_slug, framework, 
instance_slug, **kwargs): # noqa: E501 - """Delete a model instance # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_model_instance(owner_slug, model_slug, framework, instance_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :param str framework: Model instance framework (required) - :param str instance_slug: Model instance slug (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_model_instance_with_http_info(owner_slug, model_slug, framework, instance_slug, **kwargs) # noqa: E501 - else: - (data) = self.delete_model_instance_with_http_info(owner_slug, model_slug, framework, instance_slug, **kwargs) # noqa: E501 - return data - - def delete_model_instance_with_http_info(self, owner_slug, model_slug, framework, instance_slug, **kwargs): # noqa: E501 - """Delete a model instance # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_model_instance_with_http_info(owner_slug, model_slug, framework, instance_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :param str framework: Model instance framework (required) - :param str instance_slug: Model instance slug (required) - :return: Result - If the method is called asynchronously, - returns the request thread. 
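Instance deletion above is an authenticated POST with no request body; a minimal sketch (every path segment is a placeholder):

import os
import requests

auth = (os.environ["KAGGLE_USERNAME"], os.environ["KAGGLE_KEY"])
# POST /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/delete
resp = requests.post(
    "https://www.kaggle.com/api/v1/models/some-owner/some-model/some-framework/some-instance/delete",
    auth=auth,
)
resp.raise_for_status()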
- """ - - all_params = ['owner_slug', 'model_slug', 'framework', 'instance_slug'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method delete_model_instance" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'owner_slug' is set - if ('owner_slug' not in params or - params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `delete_model_instance`") # noqa: E501 - # verify the required parameter 'model_slug' is set - if ('model_slug' not in params or - params['model_slug'] is None): - raise ValueError("Missing the required parameter `model_slug` when calling `delete_model_instance`") # noqa: E501 - # verify the required parameter 'framework' is set - if ('framework' not in params or - params['framework'] is None): - raise ValueError("Missing the required parameter `framework` when calling `delete_model_instance`") # noqa: E501 - # verify the required parameter 'instance_slug' is set - if ('instance_slug' not in params or - params['instance_slug'] is None): - raise ValueError("Missing the required parameter `instance_slug` when calling `delete_model_instance`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'owner_slug' in params: - path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 - if 'model_slug' in params: - path_params['modelSlug'] = params['model_slug'] # noqa: E501 - if 'framework' in params: - path_params['framework'] = params['framework'] # noqa: E501 - if 'instance_slug' in params: - path_params['instanceSlug'] = params['instance_slug'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/delete', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def delete_model_instance_version(self, owner_slug, model_slug, framework, instance_slug, version_number, **kwargs): # noqa: E501 - """Delete a model instance version # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_model_instance_version(owner_slug, model_slug, framework, instance_slug, version_number, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :param str framework: Model instance framework (required) - :param str instance_slug: Model instance slug (required) - :param str version_number: Model instance version number (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.delete_model_instance_version_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, **kwargs) # noqa: E501 - else: - (data) = self.delete_model_instance_version_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, **kwargs) # noqa: E501 - return data - - def delete_model_instance_version_with_http_info(self, owner_slug, model_slug, framework, instance_slug, version_number, **kwargs): # noqa: E501 - """Delete a model instance version # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.delete_model_instance_version_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :param str framework: Model instance framework (required) - :param str instance_slug: Model instance slug (required) - :param str version_number: Model instance version number (required) - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['owner_slug', 'model_slug', 'framework', 'instance_slug', 'version_number'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method delete_model_instance_version" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'owner_slug' is set - if ('owner_slug' not in params or - params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `delete_model_instance_version`") # noqa: E501 - # verify the required parameter 'model_slug' is set - if ('model_slug' not in params or - params['model_slug'] is None): - raise ValueError("Missing the required parameter `model_slug` when calling `delete_model_instance_version`") # noqa: E501 - # verify the required parameter 'framework' is set - if ('framework' not in params or - params['framework'] is None): - raise ValueError("Missing the required parameter `framework` when calling `delete_model_instance_version`") # noqa: E501 - # verify the required parameter 'instance_slug' is set - if ('instance_slug' not in params or - params['instance_slug'] is None): - raise ValueError("Missing the required parameter `instance_slug` when calling `delete_model_instance_version`") # noqa: E501 - # verify the required parameter 'version_number' is set - if ('version_number' not in params or - params['version_number'] is None): - raise ValueError("Missing the required parameter `version_number` when calling `delete_model_instance_version`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'owner_slug' in params: - path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 - if 'model_slug' in params: - path_params['modelSlug'] = params['model_slug'] # noqa: E501 - if 'framework' in params: - path_params['framework'] = params['framework'] # noqa: E501 - if 'instance_slug' in params: - path_params['instanceSlug'] = params['instance_slug'] # noqa: E501 - if 'version_number' in params: - path_params['versionNumber'] = params['version_number'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/{versionNumber}/delete', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_model(self, owner_slug, model_slug, **kwargs): # noqa: E501 - """Get a model # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_model(owner_slug, model_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_model_with_http_info(owner_slug, model_slug, **kwargs) # noqa: E501 - else: - (data) = self.get_model_with_http_info(owner_slug, model_slug, **kwargs) # noqa: E501 - return data - - def get_model_with_http_info(self, owner_slug, model_slug, **kwargs): # noqa: E501 - """Get a model # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_model_with_http_info(owner_slug, model_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['owner_slug', 'model_slug'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_model" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'owner_slug' is set - if ('owner_slug' not in params or - params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `get_model`") # noqa: E501 - # verify the required parameter 'model_slug' is set - if ('model_slug' not in params or - params['model_slug'] is None): - raise ValueError("Missing the required parameter `model_slug` when calling `get_model`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'owner_slug' in params: - path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 - if 'model_slug' in params: - path_params['modelSlug'] = params['model_slug'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/models/{ownerSlug}/{modelSlug}/get', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def get_model_instance(self, owner_slug, model_slug, framework, instance_slug, **kwargs): # noqa: E501 - """Get a model instance # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_model_instance(owner_slug, model_slug, framework, instance_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :param str framework: Model instance framework (required) - :param str instance_slug: Model instance slug (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.get_model_instance_with_http_info(owner_slug, model_slug, framework, instance_slug, **kwargs) # noqa: E501 - else: - (data) = self.get_model_instance_with_http_info(owner_slug, model_slug, framework, instance_slug, **kwargs) # noqa: E501 - return data - - def get_model_instance_with_http_info(self, owner_slug, model_slug, framework, instance_slug, **kwargs): # noqa: E501 - """Get a model instance # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.get_model_instance_with_http_info(owner_slug, model_slug, framework, instance_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :param str framework: Model instance framework (required) - :param str instance_slug: Model instance slug (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['owner_slug', 'model_slug', 'framework', 'instance_slug'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method get_model_instance" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'owner_slug' is set - if ('owner_slug' not in params or - params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `get_model_instance`") # noqa: E501 - # verify the required parameter 'model_slug' is set - if ('model_slug' not in params or - params['model_slug'] is None): - raise ValueError("Missing the required parameter `model_slug` when calling `get_model_instance`") # noqa: E501 - # verify the required parameter 'framework' is set - if ('framework' not in params or - params['framework'] is None): - raise ValueError("Missing the required parameter `framework` when calling `get_model_instance`") # noqa: E501 - # verify the required parameter 'instance_slug' is set - if ('instance_slug' not in params or - params['instance_slug'] is None): - raise ValueError("Missing the required parameter `instance_slug` when calling `get_model_instance`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'owner_slug' in params: - path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 - if 'model_slug' in params: - path_params['modelSlug'] = params['model_slug'] # noqa: E501 - if 'framework' in params: - path_params['framework'] = params['framework'] # noqa: E501 - if 'instance_slug' in params: - path_params['instanceSlug'] = params['instance_slug'] # noqa: E501 - - query_params = [] - - 
header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/get', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def kernel_output(self, user_name, kernel_slug, **kwargs): # noqa: E501 - """Download the latest output from a kernel # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.kernel_output(user_name, kernel_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str user_name: Kernel owner (required) - :param str kernel_slug: Kernel name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.kernel_output_with_http_info(user_name, kernel_slug, **kwargs) # noqa: E501 - else: - (data) = self.kernel_output_with_http_info(user_name, kernel_slug, **kwargs) # noqa: E501 - return data - - def kernel_output_with_http_info(self, user_name, kernel_slug, **kwargs): # noqa: E501 - """Download the latest output from a kernel # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.kernel_output_with_http_info(user_name, kernel_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str user_name: Kernel owner (required) - :param str kernel_slug: Kernel name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. 
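The output endpoint above identifies the kernel through query parameters rather than the path; a minimal sketch (base URL, user name, and kernel slug are placeholders):

import os
import requests

auth = (os.environ["KAGGLE_USERNAME"], os.environ["KAGGLE_KEY"])
# GET /kernels/output?userName=...&kernelSlug=...
resp = requests.get(
    "https://www.kaggle.com/api/v1/kernels/output",  # assumed base path
    auth=auth,
    params={"userName": "some-user", "kernelSlug": "some-kernel"},
)
resp.raise_for_status()
print(resp.json())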
- """ - - all_params = ['user_name', 'kernel_slug'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method kernel_output" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'user_name' is set - if ('user_name' not in params or - params['user_name'] is None): - raise ValueError("Missing the required parameter `user_name` when calling `kernel_output`") # noqa: E501 - # verify the required parameter 'kernel_slug' is set - if ('kernel_slug' not in params or - params['kernel_slug'] is None): - raise ValueError("Missing the required parameter `kernel_slug` when calling `kernel_output`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'user_name' in params: - query_params.append(('userName', params['user_name'])) # noqa: E501 - if 'kernel_slug' in params: - query_params.append(('kernelSlug', params['kernel_slug'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/kernels/output', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def kernel_pull(self, user_name, kernel_slug, **kwargs): # noqa: E501 - """Pull the latest code from a kernel # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.kernel_pull(user_name, kernel_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str user_name: Kernel owner (required) - :param str kernel_slug: Kernel name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.kernel_pull_with_http_info(user_name, kernel_slug, **kwargs) # noqa: E501 - else: - (data) = self.kernel_pull_with_http_info(user_name, kernel_slug, **kwargs) # noqa: E501 - return data - - def kernel_pull_with_http_info(self, user_name, kernel_slug, **kwargs): # noqa: E501 - """Pull the latest code from a kernel # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.kernel_pull_with_http_info(user_name, kernel_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str user_name: Kernel owner (required) - :param str kernel_slug: Kernel name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['user_name', 'kernel_slug'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method kernel_pull" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'user_name' is set - if ('user_name' not in params or - params['user_name'] is None): - raise ValueError("Missing the required parameter `user_name` when calling `kernel_pull`") # noqa: E501 - # verify the required parameter 'kernel_slug' is set - if ('kernel_slug' not in params or - params['kernel_slug'] is None): - raise ValueError("Missing the required parameter `kernel_slug` when calling `kernel_pull`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'user_name' in params: - query_params.append(('userName', params['user_name'])) # noqa: E501 - if 'kernel_slug' in params: - query_params.append(('kernelSlug', params['kernel_slug'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/kernels/pull', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def kernel_push(self, kernel_push_request, **kwargs): # noqa: E501 - """Push a new kernel version. Can be used to create a new kernel and update an existing one. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.kernel_push(kernel_push_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param KernelPushRequest kernel_push_request: Information for pushing a new kernel version (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.kernel_push_with_http_info(kernel_push_request, **kwargs) # noqa: E501 - else: - (data) = self.kernel_push_with_http_info(kernel_push_request, **kwargs) # noqa: E501 - return data - - def kernel_push_with_http_info(self, kernel_push_request, **kwargs): # noqa: E501 - """Push a new kernel version. Can be used to create a new kernel and update an existing one. # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.kernel_push_with_http_info(kernel_push_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param KernelPushRequest kernel_push_request: Information for pushing a new kernel version (required) - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['kernel_push_request'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method kernel_push" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'kernel_push_request' is set - if ('kernel_push_request' not in params or - params['kernel_push_request'] is None): - raise ValueError("Missing the required parameter `kernel_push_request` when calling `kernel_push`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'kernel_push_request' in params: - body_params = params['kernel_push_request'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/kernels/push', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def kernel_status(self, user_name, kernel_slug, **kwargs): # noqa: E501 - """Get the status of the latest kernel version # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.kernel_status(user_name, kernel_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str user_name: Kernel owner (required) - :param str kernel_slug: Kernel name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.kernel_status_with_http_info(user_name, kernel_slug, **kwargs) # noqa: E501 - else: - (data) = self.kernel_status_with_http_info(user_name, kernel_slug, **kwargs) # noqa: E501 - return data - - def kernel_status_with_http_info(self, user_name, kernel_slug, **kwargs): # noqa: E501 - """Get the status of the latest kernel version # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.kernel_status_with_http_info(user_name, kernel_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str user_name: Kernel owner (required) - :param str kernel_slug: Kernel name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['user_name', 'kernel_slug'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method kernel_status" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'user_name' is set - if ('user_name' not in params or - params['user_name'] is None): - raise ValueError("Missing the required parameter `user_name` when calling `kernel_status`") # noqa: E501 - # verify the required parameter 'kernel_slug' is set - if ('kernel_slug' not in params or - params['kernel_slug'] is None): - raise ValueError("Missing the required parameter `kernel_slug` when calling `kernel_status`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'user_name' in params: - query_params.append(('userName', params['user_name'])) # noqa: E501 - if 'kernel_slug' in params: - query_params.append(('kernelSlug', params['kernel_slug'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/kernels/status', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def kernels_list(self, **kwargs): # noqa: E501 - """List kernels # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.kernels_list(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param int page: Page number - :param int page_size: Number of items per page (default 20) - :param str search: Search terms - :param str group: Display only your kernels - :param str user: Display kernels by a particular group - :param str language: Display kernels in a specific language - :param str kernel_type: Display kernels of a specific type - :param str output_type: Display kernels with a specific output type - :param str sort_by: Sort the results. 'relevance' only works if there is a search query - :param str dataset: Display kernels using the specified dataset - :param str competition: Display kernels using the specified competition - :param str parent_kernel: Display kernels that have forked the specified kernel - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.kernels_list_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.kernels_list_with_http_info(**kwargs) # noqa: E501 - return data - - def kernels_list_with_http_info(self, **kwargs): # noqa: E501 - """List kernels # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.kernels_list_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param int page: Page number - :param int page_size: Number of items per page (default 20) - :param str search: Search terms - :param str group: Display only your kernels - :param str user: Display kernels by a particular group - :param str language: Display kernels in a specific language - :param str kernel_type: Display kernels of a specific type - :param str output_type: Display kernels with a specific output type - :param str sort_by: Sort the results. 'relevance' only works if there is a search query - :param str dataset: Display kernels using the specified dataset - :param str competition: Display kernels using the specified competition - :param str parent_kernel: Display kernels that have forked the specified kernel - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['page', 'page_size', 'search', 'group', 'user', 'language', 'kernel_type', 'output_type', 'sort_by', 'dataset', 'competition', 'parent_kernel'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method kernels_list" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'page' in params: - query_params.append(('page', params['page'])) # noqa: E501 - if 'page_size' in params: - query_params.append(('pageSize', params['page_size'])) # noqa: E501 - if 'search' in params: - query_params.append(('search', params['search'])) # noqa: E501 - if 'group' in params: - query_params.append(('group', params['group'])) # noqa: E501 - if 'user' in params: - query_params.append(('user', params['user'])) # noqa: E501 - if 'language' in params: - query_params.append(('language', params['language'])) # noqa: E501 - if 'kernel_type' in params: - query_params.append(('kernelType', params['kernel_type'])) # noqa: E501 - if 'output_type' in params: - query_params.append(('outputType', params['output_type'])) # noqa: E501 - if 'sort_by' in params: - query_params.append(('sortBy', params['sort_by'])) # noqa: E501 - if 'dataset' in params: - query_params.append(('dataset', params['dataset'])) # noqa: E501 - if 'competition' in params: - query_params.append(('competition', params['competition'])) # noqa: E501 - if 'parent_kernel' in params: - query_params.append(('parentKernel', params['parent_kernel'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/kernels/list', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - 
_request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def kernels_list_files(self, user_name, kernel_slug, **kwargs): # noqa: E501 - """List kernel files # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.kernels_list_files(user_name, kernel_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str user_name: Kernel owner (required) - :param str kernel_slug: Kernel name (required) - :param str kernel_version_number: Kernel version number - :param int page_size: Number of items per page (default 20) - :param str page_token: Page token for pagination - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.kernels_list_files_with_http_info(user_name, kernel_slug, **kwargs) # noqa: E501 - else: - (data) = self.kernels_list_files_with_http_info(user_name, kernel_slug, **kwargs) # noqa: E501 - return data - - def kernels_list_files_with_http_info(self, user_name, kernel_slug, **kwargs): # noqa: E501 - """List kernel files # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.kernels_list_files_with_http_info(user_name, kernel_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str user_name: Kernel owner (required) - :param str kernel_slug: Kernel name (required) - :param str kernel_version_number: Kernel version number - :param int page_size: Number of items per page (default 20) - :param str page_token: Page token for pagination - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['user_name', 'kernel_slug', 'kernel_version_number', 'page_size', 'page_token'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method kernels_list_files" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'user_name' is set - if ('user_name' not in params or - params['user_name'] is None): - raise ValueError("Missing the required parameter `user_name` when calling `kernels_list_files`") # noqa: E501 - # verify the required parameter 'kernel_slug' is set - if ('kernel_slug' not in params or - params['kernel_slug'] is None): - raise ValueError("Missing the required parameter `kernel_slug` when calling `kernels_list_files`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'user_name' in params: - query_params.append(('userName', params['user_name'])) # noqa: E501 - if 'kernel_slug' in params: - query_params.append(('kernelSlug', params['kernel_slug'])) # noqa: E501 - if 'kernel_version_number' in params: - query_params.append(('kernelVersionNumber', params['kernel_version_number'])) # noqa: E501 - if 'page_size' in params: - query_params.append(('pageSize', params['page_size'])) # noqa: E501 - if 'page_token' in params: - query_params.append(('pageToken', params['page_token'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/kernels/files', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def metadata_get(self, owner_slug, dataset_slug, **kwargs): # noqa: E501 - """Get the metadata for a dataset # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.metadata_get(owner_slug, dataset_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Dataset owner (required) - :param str dataset_slug: Dataset name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.metadata_get_with_http_info(owner_slug, dataset_slug, **kwargs) # noqa: E501 - else: - (data) = self.metadata_get_with_http_info(owner_slug, dataset_slug, **kwargs) # noqa: E501 - return data - - def metadata_get_with_http_info(self, owner_slug, dataset_slug, **kwargs): # noqa: E501 - """Get the metadata for a dataset # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.metadata_get_with_http_info(owner_slug, dataset_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Dataset owner (required) - :param str dataset_slug: Dataset name (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['owner_slug', 'dataset_slug'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method metadata_get" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'owner_slug' is set - if ('owner_slug' not in params or - params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `metadata_get`") # noqa: E501 - # verify the required parameter 'dataset_slug' is set - if ('dataset_slug' not in params or - params['dataset_slug'] is None): - raise ValueError("Missing the required parameter `dataset_slug` when calling `metadata_get`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'owner_slug' in params: - path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 - if 'dataset_slug' in params: - path_params['datasetSlug'] = params['dataset_slug'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/datasets/metadata/{ownerSlug}/{datasetSlug}', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def metadata_post(self, owner_slug, dataset_slug, settings, **kwargs): # noqa: E501 - """Update the metadata for a dataset # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.metadata_post(owner_slug, dataset_slug, settings, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Dataset owner (required) - :param str dataset_slug: Dataset name (required) - :param DatasetUpdateSettingsRequest settings: Dataset metadata to update (required) - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.metadata_post_with_http_info(owner_slug, dataset_slug, settings, **kwargs) # noqa: E501 - else: - (data) = self.metadata_post_with_http_info(owner_slug, dataset_slug, settings, **kwargs) # noqa: E501 - return data - - def metadata_post_with_http_info(self, owner_slug, dataset_slug, settings, **kwargs): # noqa: E501 - """Update the metadata for a dataset # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.metadata_post_with_http_info(owner_slug, dataset_slug, settings, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Dataset owner (required) - :param str dataset_slug: Dataset name (required) - :param DatasetUpdateSettingsRequest settings: Dataset metadata to update (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['owner_slug', 'dataset_slug', 'settings'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method metadata_post" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'owner_slug' is set - if ('owner_slug' not in params or - params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `metadata_post`") # noqa: E501 - # verify the required parameter 'dataset_slug' is set - if ('dataset_slug' not in params or - params['dataset_slug'] is None): - raise ValueError("Missing the required parameter `dataset_slug` when calling `metadata_post`") # noqa: E501 - # verify the required parameter 'settings' is set - if ('settings' not in params or - params['settings'] is None): - raise ValueError("Missing the required parameter `settings` when calling `metadata_post`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'owner_slug' in params: - path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 - if 'dataset_slug' in params: - path_params['datasetSlug'] = params['dataset_slug'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'settings' in params: - body_params = params['settings'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/datasets/metadata/{ownerSlug}/{datasetSlug}', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def model_instance_files(self, owner_slug, 
model_slug, framework, instance_slug, **kwargs): # noqa: E501 - """List model instance files for the current version # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.model_instance_files(owner_slug, model_slug, framework, instance_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :param str framework: Model instance framework (required) - :param str instance_slug: Model instance slug (required) - :param int page_size: Number of items per page (default 20) - :param str page_token: Page token for pagination - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.model_instance_files_with_http_info(owner_slug, model_slug, framework, instance_slug, **kwargs) # noqa: E501 - else: - (data) = self.model_instance_files_with_http_info(owner_slug, model_slug, framework, instance_slug, **kwargs) # noqa: E501 - return data - - def model_instance_files_with_http_info(self, owner_slug, model_slug, framework, instance_slug, **kwargs): # noqa: E501 - """List model instance files for the current version # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.model_instance_files_with_http_info(owner_slug, model_slug, framework, instance_slug, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :param str framework: Model instance framework (required) - :param str instance_slug: Model instance slug (required) - :param int page_size: Number of items per page (default 20) - :param str page_token: Page token for pagination - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['owner_slug', 'model_slug', 'framework', 'instance_slug', 'page_size', 'page_token'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method model_instance_files" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'owner_slug' is set - if ('owner_slug' not in params or - params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `model_instance_files`") # noqa: E501 - # verify the required parameter 'model_slug' is set - if ('model_slug' not in params or - params['model_slug'] is None): - raise ValueError("Missing the required parameter `model_slug` when calling `model_instance_files`") # noqa: E501 - # verify the required parameter 'framework' is set - if ('framework' not in params or - params['framework'] is None): - raise ValueError("Missing the required parameter `framework` when calling `model_instance_files`") # noqa: E501 - # verify the required parameter 'instance_slug' is set - if ('instance_slug' not in params or - params['instance_slug'] is None): - raise ValueError("Missing the required parameter `instance_slug` when calling `model_instance_files`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'owner_slug' in params: - path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 - if 'model_slug' in params: - path_params['modelSlug'] = params['model_slug'] # noqa: E501 - if 'framework' in params: - path_params['framework'] = params['framework'] # noqa: E501 - if 'instance_slug' in params: - path_params['instanceSlug'] = params['instance_slug'] # noqa: E501 - - query_params = [] - if 'page_size' in params: - query_params.append(('pageSize', params['page_size'])) # noqa: E501 - if 'page_token' in params: - query_params.append(('pageToken', params['page_token'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/files', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def model_instance_version_files(self, owner_slug, model_slug, framework, instance_slug, version_number, **kwargs): # noqa: E501 - """List model instance version files # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.model_instance_version_files(owner_slug, model_slug, framework, instance_slug, version_number, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :param str framework: Model instance framework (required) - :param str instance_slug: Model instance slug (required) - :param str version_number: Model instance version number (required) - :param int page_size: Number of items per page (default 20) - :param str page_token: Page token for pagination - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.model_instance_version_files_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, **kwargs) # noqa: E501 - else: - (data) = self.model_instance_version_files_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, **kwargs) # noqa: E501 - return data - - def model_instance_version_files_with_http_info(self, owner_slug, model_slug, framework, instance_slug, version_number, **kwargs): # noqa: E501 - """List model instance version files # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.model_instance_version_files_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :param str framework: Model instance framework (required) - :param str instance_slug: Model instance slug (required) - :param str version_number: Model instance version number (required) - :param int page_size: Number of items per page (default 20) - :param str page_token: Page token for pagination - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['owner_slug', 'model_slug', 'framework', 'instance_slug', 'version_number', 'page_size', 'page_token'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method model_instance_version_files" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'owner_slug' is set - if ('owner_slug' not in params or - params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `model_instance_version_files`") # noqa: E501 - # verify the required parameter 'model_slug' is set - if ('model_slug' not in params or - params['model_slug'] is None): - raise ValueError("Missing the required parameter `model_slug` when calling `model_instance_version_files`") # noqa: E501 - # verify the required parameter 'framework' is set - if ('framework' not in params or - params['framework'] is None): - raise ValueError("Missing the required parameter `framework` when calling `model_instance_version_files`") # noqa: E501 - # verify the required parameter 'instance_slug' is set - if ('instance_slug' not in params or - params['instance_slug'] is None): - raise ValueError("Missing the required parameter `instance_slug` when calling `model_instance_version_files`") # noqa: E501 - # verify the required parameter 'version_number' is set - if ('version_number' not in params or - params['version_number'] is None): - raise ValueError("Missing the required parameter `version_number` when calling `model_instance_version_files`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'owner_slug' in params: - path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 - if 'model_slug' in params: - path_params['modelSlug'] = params['model_slug'] # noqa: E501 - if 'framework' in params: - path_params['framework'] = params['framework'] # noqa: E501 - if 'instance_slug' in params: - path_params['instanceSlug'] = params['instance_slug'] # noqa: E501 - if 'version_number' in params: - path_params['versionNumber'] = params['version_number'] # noqa: E501 - - query_params = [] - if 'page_size' in params: - query_params.append(('pageSize', params['page_size'])) # noqa: E501 - if 'page_token' in params: - query_params.append(('pageToken', params['page_token'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/{versionNumber}/files', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def model_instance_versions_download(self, owner_slug, model_slug, framework, instance_slug, version_number, **kwargs): # 
noqa: E501 - """Download model instance version files # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.model_instance_versions_download(owner_slug, model_slug, framework, instance_slug, version_number, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :param str framework: Model instance framework (required) - :param str instance_slug: Model instance slug (required) - :param str version_number: Model instance version number (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.model_instance_versions_download_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, **kwargs) # noqa: E501 - else: - (data) = self.model_instance_versions_download_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, **kwargs) # noqa: E501 - return data - - def model_instance_versions_download_with_http_info(self, owner_slug, model_slug, framework, instance_slug, version_number, **kwargs): # noqa: E501 - """Download model instance version files # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.model_instance_versions_download_with_http_info(owner_slug, model_slug, framework, instance_slug, version_number, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :param str framework: Model instance framework (required) - :param str instance_slug: Model instance slug (required) - :param str version_number: Model instance version number (required) - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['owner_slug', 'model_slug', 'framework', 'instance_slug', 'version_number'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method model_instance_versions_download" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'owner_slug' is set - if ('owner_slug' not in params or - params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `model_instance_versions_download`") # noqa: E501 - # verify the required parameter 'model_slug' is set - if ('model_slug' not in params or - params['model_slug'] is None): - raise ValueError("Missing the required parameter `model_slug` when calling `model_instance_versions_download`") # noqa: E501 - # verify the required parameter 'framework' is set - if ('framework' not in params or - params['framework'] is None): - raise ValueError("Missing the required parameter `framework` when calling `model_instance_versions_download`") # noqa: E501 - # verify the required parameter 'instance_slug' is set - if ('instance_slug' not in params or - params['instance_slug'] is None): - raise ValueError("Missing the required parameter `instance_slug` when calling `model_instance_versions_download`") # noqa: E501 - # verify the required parameter 'version_number' is set - if ('version_number' not in params or - params['version_number'] is None): - raise ValueError("Missing the required parameter `version_number` when calling `model_instance_versions_download`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'owner_slug' in params: - path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 - if 'model_slug' in params: - path_params['modelSlug'] = params['model_slug'] # noqa: E501 - if 'framework' in params: - path_params['framework'] = params['framework'] # noqa: E501 - if 'instance_slug' in params: - path_params['instanceSlug'] = params['instance_slug'] # noqa: E501 - if 'version_number' in params: - path_params['versionNumber'] = params['version_number'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['file']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/{versionNumber}/download', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def models_create_instance(self, owner_slug, model_slug, model_new_instance_request, **kwargs): # noqa: E501 - """Create a new model instance # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.models_create_instance(owner_slug, model_slug, model_new_instance_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model slug (required) - :param ModelNewInstanceRequest model_new_instance_request: Information for creating a new model instance (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.models_create_instance_with_http_info(owner_slug, model_slug, model_new_instance_request, **kwargs) # noqa: E501 - else: - (data) = self.models_create_instance_with_http_info(owner_slug, model_slug, model_new_instance_request, **kwargs) # noqa: E501 - return data - - def models_create_instance_with_http_info(self, owner_slug, model_slug, model_new_instance_request, **kwargs): # noqa: E501 - """Create a new model instance # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.models_create_instance_with_http_info(owner_slug, model_slug, model_new_instance_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model slug (required) - :param ModelNewInstanceRequest model_new_instance_request: Information for creating a new model instance (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['owner_slug', 'model_slug', 'model_new_instance_request'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method models_create_instance" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'owner_slug' is set - if ('owner_slug' not in params or - params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `models_create_instance`") # noqa: E501 - # verify the required parameter 'model_slug' is set - if ('model_slug' not in params or - params['model_slug'] is None): - raise ValueError("Missing the required parameter `model_slug` when calling `models_create_instance`") # noqa: E501 - # verify the required parameter 'model_new_instance_request' is set - if ('model_new_instance_request' not in params or - params['model_new_instance_request'] is None): - raise ValueError("Missing the required parameter `model_new_instance_request` when calling `models_create_instance`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'owner_slug' in params: - path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 - if 'model_slug' in params: - path_params['modelSlug'] = params['model_slug'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'model_new_instance_request' in params: - body_params = params['model_new_instance_request'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # HTTP 
header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/models/{ownerSlug}/{modelSlug}/create/instance', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def models_create_instance_version(self, owner_slug, model_slug, framework, instance_slug, model_instance_new_version_request, **kwargs): # noqa: E501 - """Create a new model instance version # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.models_create_instance_version(owner_slug, model_slug, framework, instance_slug, model_instance_new_version_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model slug (required) - :param str framework: Model instance framework (required) - :param str instance_slug: Model instance slug (required) - :param ModelInstanceNewVersionRequest model_instance_new_version_request: Information for creating a new model instance version (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.models_create_instance_version_with_http_info(owner_slug, model_slug, framework, instance_slug, model_instance_new_version_request, **kwargs) # noqa: E501 - else: - (data) = self.models_create_instance_version_with_http_info(owner_slug, model_slug, framework, instance_slug, model_instance_new_version_request, **kwargs) # noqa: E501 - return data - - def models_create_instance_version_with_http_info(self, owner_slug, model_slug, framework, instance_slug, model_instance_new_version_request, **kwargs): # noqa: E501 - """Create a new model instance version # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.models_create_instance_version_with_http_info(owner_slug, model_slug, framework, instance_slug, model_instance_new_version_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model slug (required) - :param str framework: Model instance framework (required) - :param str instance_slug: Model instance slug (required) - :param ModelInstanceNewVersionRequest model_instance_new_version_request: Information for creating a new model instance version (required) - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['owner_slug', 'model_slug', 'framework', 'instance_slug', 'model_instance_new_version_request'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method models_create_instance_version" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'owner_slug' is set - if ('owner_slug' not in params or - params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `models_create_instance_version`") # noqa: E501 - # verify the required parameter 'model_slug' is set - if ('model_slug' not in params or - params['model_slug'] is None): - raise ValueError("Missing the required parameter `model_slug` when calling `models_create_instance_version`") # noqa: E501 - # verify the required parameter 'framework' is set - if ('framework' not in params or - params['framework'] is None): - raise ValueError("Missing the required parameter `framework` when calling `models_create_instance_version`") # noqa: E501 - # verify the required parameter 'instance_slug' is set - if ('instance_slug' not in params or - params['instance_slug'] is None): - raise ValueError("Missing the required parameter `instance_slug` when calling `models_create_instance_version`") # noqa: E501 - # verify the required parameter 'model_instance_new_version_request' is set - if ('model_instance_new_version_request' not in params or - params['model_instance_new_version_request'] is None): - raise ValueError("Missing the required parameter `model_instance_new_version_request` when calling `models_create_instance_version`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'owner_slug' in params: - path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 - if 'model_slug' in params: - path_params['modelSlug'] = params['model_slug'] # noqa: E501 - if 'framework' in params: - path_params['framework'] = params['framework'] # noqa: E501 - if 'instance_slug' in params: - path_params['instanceSlug'] = params['instance_slug'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'model_instance_new_version_request' in params: - body_params = params['model_instance_new_version_request'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/create/version', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def models_create_new(self, model_new_request, **kwargs): # noqa: E501 - 
"""Create a new model # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.models_create_new(model_new_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param ModelNewRequest model_new_request: Information for creating a new model (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.models_create_new_with_http_info(model_new_request, **kwargs) # noqa: E501 - else: - (data) = self.models_create_new_with_http_info(model_new_request, **kwargs) # noqa: E501 - return data - - def models_create_new_with_http_info(self, model_new_request, **kwargs): # noqa: E501 - """Create a new model # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.models_create_new_with_http_info(model_new_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param ModelNewRequest model_new_request: Information for creating a new model (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['model_new_request'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method models_create_new" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'model_new_request' is set - if ('model_new_request' not in params or - params['model_new_request'] is None): - raise ValueError("Missing the required parameter `model_new_request` when calling `models_create_new`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'model_new_request' in params: - body_params = params['model_new_request'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/models/create/new', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def models_list(self, **kwargs): # noqa: E501 - """Lists models # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.models_list(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str search: Search terms - :param str sort_by: Sort the results - :param str owner: Display models by a specific user or organization - :param int page_size: Number of items per page (default 20) - :param str page_token: Page token for pagination - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.models_list_with_http_info(**kwargs) # noqa: E501 - else: - (data) = self.models_list_with_http_info(**kwargs) # noqa: E501 - return data - - def models_list_with_http_info(self, **kwargs): # noqa: E501 - """Lists models # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.models_list_with_http_info(async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str search: Search terms - :param str sort_by: Sort the results - :param str owner: Display models by a specific user or organization - :param int page_size: Number of items per page (default 20) - :param str page_token: Page token for pagination - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['search', 'sort_by', 'owner', 'page_size', 'page_token'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method models_list" % key - ) - params[key] = val - del params['kwargs'] - - collection_formats = {} - - path_params = {} - - query_params = [] - if 'search' in params: - query_params.append(('search', params['search'])) # noqa: E501 - if 'sort_by' in params: - query_params.append(('sortBy', params['sort_by'])) # noqa: E501 - if 'owner' in params: - query_params.append(('owner', params['owner'])) # noqa: E501 - if 'page_size' in params: - query_params.append(('pageSize', params['page_size'])) # noqa: E501 - if 'page_token' in params: - query_params.append(('pageToken', params['page_token'])) # noqa: E501 - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/models/list', 'GET', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def update_model(self, owner_slug, model_slug, model_update_request, **kwargs): # noqa: E501 - """Update a model # noqa: E501 - - This method makes a synchronous HTTP request by default. 
To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_model(owner_slug, model_slug, model_update_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :param ModelUpdateRequest model_update_request: Information for updating a model (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.update_model_with_http_info(owner_slug, model_slug, model_update_request, **kwargs) # noqa: E501 - else: - (data) = self.update_model_with_http_info(owner_slug, model_slug, model_update_request, **kwargs) # noqa: E501 - return data - - def update_model_with_http_info(self, owner_slug, model_slug, model_update_request, **kwargs): # noqa: E501 - """Update a model # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_model_with_http_info(owner_slug, model_slug, model_update_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :param ModelUpdateRequest model_update_request: Information for updating a model (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['owner_slug', 'model_slug', 'model_update_request'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method update_model" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'owner_slug' is set - if ('owner_slug' not in params or - params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `update_model`") # noqa: E501 - # verify the required parameter 'model_slug' is set - if ('model_slug' not in params or - params['model_slug'] is None): - raise ValueError("Missing the required parameter `model_slug` when calling `update_model`") # noqa: E501 - # verify the required parameter 'model_update_request' is set - if ('model_update_request' not in params or - params['model_update_request'] is None): - raise ValueError("Missing the required parameter `model_update_request` when calling `update_model`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'owner_slug' in params: - path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 - if 'model_slug' in params: - path_params['modelSlug'] = params['model_slug'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'model_update_request' in params: - body_params = params['model_update_request'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - 
return self.api_client.call_api( - '/models/{ownerSlug}/{modelSlug}/update', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def update_model_instance(self, owner_slug, model_slug, framework, instance_slug, model_instance_update_request, **kwargs): # noqa: E501 - """Update a model # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_model_instance(owner_slug, model_slug, framework, instance_slug, model_instance_update_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :param str framework: Model instance framework (required) - :param str instance_slug: Model instance slug (required) - :param ModelInstanceUpdateRequest model_instance_update_request: Information for updating a model instance (required) - :return: Result - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.update_model_instance_with_http_info(owner_slug, model_slug, framework, instance_slug, model_instance_update_request, **kwargs) # noqa: E501 - else: - (data) = self.update_model_instance_with_http_info(owner_slug, model_slug, framework, instance_slug, model_instance_update_request, **kwargs) # noqa: E501 - return data - - def update_model_instance_with_http_info(self, owner_slug, model_slug, framework, instance_slug, model_instance_update_request, **kwargs): # noqa: E501 - """Update a model # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.update_model_instance_with_http_info(owner_slug, model_slug, framework, instance_slug, model_instance_update_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param str owner_slug: Model owner (required) - :param str model_slug: Model name (required) - :param str framework: Model instance framework (required) - :param str instance_slug: Model instance slug (required) - :param ModelInstanceUpdateRequest model_instance_update_request: Information for updating a model instance (required) - :return: Result - If the method is called asynchronously, - returns the request thread. 
- """ - - all_params = ['owner_slug', 'model_slug', 'framework', 'instance_slug', 'model_instance_update_request'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method update_model_instance" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'owner_slug' is set - if ('owner_slug' not in params or - params['owner_slug'] is None): - raise ValueError("Missing the required parameter `owner_slug` when calling `update_model_instance`") # noqa: E501 - # verify the required parameter 'model_slug' is set - if ('model_slug' not in params or - params['model_slug'] is None): - raise ValueError("Missing the required parameter `model_slug` when calling `update_model_instance`") # noqa: E501 - # verify the required parameter 'framework' is set - if ('framework' not in params or - params['framework'] is None): - raise ValueError("Missing the required parameter `framework` when calling `update_model_instance`") # noqa: E501 - # verify the required parameter 'instance_slug' is set - if ('instance_slug' not in params or - params['instance_slug'] is None): - raise ValueError("Missing the required parameter `instance_slug` when calling `update_model_instance`") # noqa: E501 - # verify the required parameter 'model_instance_update_request' is set - if ('model_instance_update_request' not in params or - params['model_instance_update_request'] is None): - raise ValueError("Missing the required parameter `model_instance_update_request` when calling `update_model_instance`") # noqa: E501 - - collection_formats = {} - - path_params = {} - if 'owner_slug' in params: - path_params['ownerSlug'] = params['owner_slug'] # noqa: E501 - if 'model_slug' in params: - path_params['modelSlug'] = params['model_slug'] # noqa: E501 - if 'framework' in params: - path_params['framework'] = params['framework'] # noqa: E501 - if 'instance_slug' in params: - path_params['instanceSlug'] = params['instance_slug'] # noqa: E501 - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'model_instance_update_request' in params: - body_params = params['model_instance_update_request'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/update', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='Result', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) - - def upload_file(self, start_blob_upload_request, **kwargs): # noqa: E501 - """Start uploading a file # noqa: E501 - - This method makes a synchronous HTTP request by 
default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.upload_file(start_blob_upload_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param StartBlobUploadRequest start_blob_upload_request: (required) - :return: StartBlobUploadResponse - If the method is called asynchronously, - returns the request thread. - """ - kwargs['_return_http_data_only'] = True - if kwargs.get('async_req'): - return self.upload_file_with_http_info(start_blob_upload_request, **kwargs) # noqa: E501 - else: - (data) = self.upload_file_with_http_info(start_blob_upload_request, **kwargs) # noqa: E501 - return data - - def upload_file_with_http_info(self, start_blob_upload_request, **kwargs): # noqa: E501 - """Start uploading a file # noqa: E501 - - This method makes a synchronous HTTP request by default. To make an - asynchronous HTTP request, please pass async_req=True - >>> thread = api.upload_file_with_http_info(start_blob_upload_request, async_req=True) - >>> result = thread.get() - - :param async_req bool - :param StartBlobUploadRequest start_blob_upload_request: (required) - :return: StartBlobUploadResponse - If the method is called asynchronously, - returns the request thread. - """ - - all_params = ['start_blob_upload_request'] # noqa: E501 - all_params.append('async_req') - all_params.append('_return_http_data_only') - all_params.append('_preload_content') - all_params.append('_request_timeout') - - params = locals() - for key, val in six.iteritems(params['kwargs']): - if key not in all_params: - raise TypeError( - "Got an unexpected keyword argument '%s'" - " to method upload_file" % key - ) - params[key] = val - del params['kwargs'] - # verify the required parameter 'start_blob_upload_request' is set - if ('start_blob_upload_request' not in params or - params['start_blob_upload_request'] is None): - raise ValueError("Missing the required parameter `start_blob_upload_request` when calling `upload_file`") # noqa: E501 - - collection_formats = {} - - path_params = {} - - query_params = [] - - header_params = {} - - form_params = [] - local_var_files = {} - - body_params = None - if 'start_blob_upload_request' in params: - body_params = params['start_blob_upload_request'] - # HTTP header `Accept` - header_params['Accept'] = self.api_client.select_header_accept( - ['application/json']) # noqa: E501 - - # HTTP header `Content-Type` - header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 - ['application/json']) # noqa: E501 - - # Authentication setting - auth_settings = ['basicAuth'] # noqa: E501 - - return self.api_client.call_api( - '/blobs/upload', 'POST', - path_params, - query_params, - header_params, - body=body_params, - post_params=form_params, - files=local_var_files, - response_type='StartBlobUploadResponse', # noqa: E501 - auth_settings=auth_settings, - async_req=params.get('async_req'), - _return_http_data_only=params.get('_return_http_data_only'), - _preload_content=params.get('_preload_content', True), - _request_timeout=params.get('_request_timeout'), - collection_formats=collection_formats) diff --git a/kaggle/api/kaggle_api_extended.py b/kaggle/api/kaggle_api_extended.py index fc4c3d6..11bbcf0 100644 --- a/kaggle/api/kaggle_api_extended.py +++ b/kaggle/api/kaggle_api_extended.py @@ -14,22 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
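For orientation (this sketch is illustrative and not part of the patch): the swagger-generated upload_file/upload_file_with_http_info pair deleted above posted to /blobs/upload through the old ApiClient. After this change the same step goes through the kagglesdk blob client, with the request fields and client attribute path taken from the reworked _upload_blob further down in this file. The file path and credentials below are placeholders.

import os

from kagglesdk import KaggleClient, KaggleEnv
from kagglesdk.blobs.types.blob_api_service import ApiBlobType, ApiStartBlobUploadRequest

path = 'data.csv'  # placeholder local file

# Build the start-upload request the same way _upload_blob does.
request = ApiStartBlobUploadRequest()
request.type = ApiBlobType.DATASET
request.name = os.path.basename(path)
request.content_length = os.path.getsize(path)
request.last_modified_epoch_seconds = int(os.path.getmtime(path))

# KaggleClient is used as a context manager, mirroring build_kaggle_client().
with KaggleClient(env=KaggleEnv.PROD, verbose=False,
                  username='<username>', password='<api-key>') as kaggle:
    # Returns an ApiStartBlobUploadResponse; upload_complete() then sends the
    # file bytes to the URL/token it carries.
    response = kaggle.blobs.blob_api_client.start_blob_upload(request)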
-#!/usr/bin/python -# -# Copyright 2019 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - # coding=utf-8 from __future__ import print_function @@ -48,6 +32,10 @@ import bleach import requests import urllib3.exceptions as urllib3_exceptions +from requests import RequestException + +from kaggle.models.kaggle_models_extended import ResumableUploadResult, File + from requests.adapters import HTTPAdapter from slugify import slugify from tqdm import tqdm @@ -56,6 +44,9 @@ from kaggle.configuration import Configuration from kagglesdk import KaggleClient, KaggleEnv +from kagglesdk.admin.types.inbox_file_service import CreateInboxFileRequest +from kagglesdk.blobs.types.blob_api_service import ApiStartBlobUploadRequest, \ + ApiStartBlobUploadResponse, ApiBlobType from kagglesdk.competitions.types.competition_api_service import * from kagglesdk.datasets.types.dataset_api_service import ApiListDatasetsRequest, \ ApiListDatasetFilesRequest, \ @@ -67,7 +58,7 @@ from kagglesdk.datasets.types.dataset_enums import DatasetSelectionGroup, \ DatasetSortBy, DatasetFileTypeGroup, DatasetLicenseGroup from kagglesdk.datasets.types.dataset_types import DatasetSettings, \ - SettingsLicense, DatasetCollaborator, DatasetSettingsFile + SettingsLicense, DatasetCollaborator from kagglesdk.kernels.types.kernels_api_service import ApiListKernelsRequest, \ ApiListKernelFilesRequest, ApiSaveKernelRequest, ApiGetKernelRequest, \ ApiListKernelSessionOutputRequest, ApiGetKernelSessionStatusRequest @@ -83,36 +74,8 @@ ApiDownloadModelInstanceVersionRequest, ApiDeleteModelInstanceVersionRequest from kagglesdk.models.types.model_enums import ListModelsOrderBy, \ ModelInstanceType, ModelFramework -from .kaggle_api import KaggleApi -from ..api_client import ApiClient -from ..models.api_blob_type import ApiBlobType -from ..models.collaborator import Collaborator -from ..models.create_inbox_file_request import CreateInboxFileRequest from ..models.dataset_column import DatasetColumn -from ..models.dataset_new_request import DatasetNewRequest -from ..models.dataset_update_settings_request import DatasetUpdateSettingsRequest -from ..models.kaggle_models_extended import DatasetNewResponse -from ..models.kaggle_models_extended import DatasetNewVersionResponse -from ..models.kaggle_models_extended import File -from ..models.kaggle_models_extended import Kernel -from ..models.kaggle_models_extended import KernelPushResponse -from ..models.kaggle_models_extended import ListFilesResult -from ..models.kaggle_models_extended import Metadata -from ..models.kaggle_models_extended import Model -from ..models.kaggle_models_extended import ModelDeleteResponse -from ..models.kaggle_models_extended import ModelNewResponse -from ..models.kaggle_models_extended import ResumableUploadResult -from ..models.kernel_push_request import KernelPushRequest -from ..models.license import License -from ..models.model_instance_new_version_request import ModelInstanceNewVersionRequest -from ..models.model_instance_update_request import 
ModelInstanceUpdateRequest -from ..models.model_new_instance_request import ModelNewInstanceRequest -from ..models.model_new_request import ModelNewRequest -from ..models.model_update_request import ModelUpdateRequest -from ..models.start_blob_upload_request import StartBlobUploadRequest -from ..models.start_blob_upload_response import StartBlobUploadResponse from ..models.upload_file import UploadFile -from ..rest import ApiException class DirectoryArchive(object): @@ -252,27 +215,30 @@ def cleanup(self): def to_dict(self): return { - 'path': - self.path, - 'start_blob_upload_request': - self.start_blob_upload_request.to_dict(), - 'timestamp': - self.timestamp, - 'start_blob_upload_response': - self.start_blob_upload_response.to_dict() - if self.start_blob_upload_response is not None else None, - 'upload_complete': - self.upload_complete, - } + 'path': + self.path, + 'start_blob_upload_request': + self.start_blob_upload_request.to_dict(), + 'timestamp': + self.timestamp, + 'start_blob_upload_response': + self.start_blob_upload_response.to_dict() + if self.start_blob_upload_response is not None else None, + 'upload_complete': + self.upload_complete, + } def from_dict(other, context): + req = ApiStartBlobUploadRequest() + req.from_dict(other['start_blob_upload_request']) new = ResumableFileUpload( other['path'], - StartBlobUploadRequest(**other['start_blob_upload_request']), context) + ApiStartBlobUploadRequest(**other['start_blob_upload_request']), + context) new.timestamp = other.get('timestamp') start_blob_upload_response = other.get('start_blob_upload_response') if start_blob_upload_response is not None: - new.start_blob_upload_response = StartBlobUploadResponse( + new.start_blob_upload_response = ApiStartBlobUploadResponse( **start_blob_upload_response) new.upload_complete = other.get('upload_complete') or False return new @@ -284,7 +250,7 @@ def __repr__(self): return self.to_str() -class KaggleApi(KaggleApi): +class KaggleApi: __version__ = '1.6.17' CONFIG_NAME_PROXY = 'proxy' @@ -332,35 +298,35 @@ class KaggleApi(KaggleApi): valid_list_kernel_types = ['all', 'script', 'notebook'] valid_list_output_types = ['all', 'visualization', 'data'] valid_list_sort_by = [ - 'hotness', 'commentCount', 'dateCreated', 'dateRun', 'relevance', - 'scoreAscending', 'scoreDescending', 'viewCount', 'voteCount' - ] + 'hotness', 'commentCount', 'dateCreated', 'dateRun', 'relevance', + 'scoreAscending', 'scoreDescending', 'viewCount', 'voteCount' + ] # Competitions valid types valid_competition_groups = [ - 'general', 'entered', 'community', 'hosted', 'unlaunched', - 'unlaunched_community' - ] + 'general', 'entered', 'community', 'hosted', 'unlaunched', + 'unlaunched_community' + ] valid_competition_categories = [ - 'all', 'featured', 'research', 'recruitment', 'gettingStarted', 'masters', - 'playground' - ] + 'all', 'featured', 'research', 'recruitment', 'gettingStarted', 'masters', + 'playground' + ] valid_competition_sort_by = [ - 'grouped', 'best', 'prize', 'earliestDeadline', 'latestDeadline', - 'numberOfTeams', 'relevance', 'recentlyCreated' - ] + 'grouped', 'best', 'prize', 'earliestDeadline', 'latestDeadline', + 'numberOfTeams', 'relevance', 'recentlyCreated' + ] # Datasets valid types valid_dataset_file_types = ['all', 'csv', 'sqlite', 'json', 'bigQuery'] valid_dataset_license_names = ['all', 'cc', 'gpl', 'odb', 'other'] valid_dataset_sort_bys = [ - 'hottest', 'votes', 'updated', 'active', 'published' - ] + 'hottest', 'votes', 'updated', 'active', 'published' + ] # Models valid types 
valid_model_sort_bys = [ - 'hotness', 'downloadCount', 'voteCount', 'notebookCount', 'createTime' - ] + 'hotness', 'downloadCount', 'voteCount', 'notebookCount', 'createTime' + ] # Command prefixes that are valid without authentication. command_prefixes_allowing_anonymous_access = ('datasets download', @@ -368,31 +334,31 @@ class KaggleApi(KaggleApi): # Attributes competition_fields = [ - 'ref', 'deadline', 'category', 'reward', 'teamCount', 'userHasEntered' - ] + 'ref', 'deadline', 'category', 'reward', 'teamCount', 'userHasEntered' + ] submission_fields = [ - 'fileName', 'date', 'description', 'status', 'publicScore', 'privateScore' - ] + 'fileName', 'date', 'description', 'status', 'publicScore', 'privateScore' + ] competition_file_fields = ['name', 'totalBytes', 'creationDate'] competition_file_labels = ['name', 'size', 'creationDate'] competition_leaderboard_fields = [ - 'teamId', 'teamName', 'submissionDate', 'score' - ] + 'teamId', 'teamName', 'submissionDate', 'score' + ] dataset_fields = [ - 'ref', 'title', 'totalBytes', 'lastUpdated', 'downloadCount', 'voteCount', - 'usabilityRating' - ] + 'ref', 'title', 'totalBytes', 'lastUpdated', 'downloadCount', 'voteCount', + 'usabilityRating' + ] dataset_labels = [ - 'ref', 'title', 'size', 'lastUpdated', 'downloadCount', 'voteCount', - 'usabilityRating' - ] - dataset_file_fields = ['name', 'size', - 'creationDate'] # TODO databundle_file_files? + 'ref', 'title', 'size', 'lastUpdated', 'downloadCount', 'voteCount', + 'usabilityRating' + ] + dataset_file_fields = ['name', 'total_bytes', 'creationDate'] model_fields = ['id', 'ref', 'title', 'subtitle', 'author'] model_all_fields = [ - 'id', 'ref', 'author', 'slug', 'title', 'subtitle', 'isPrivate', - 'description', 'publishTime' - ] + 'id', 'ref', 'author', 'slug', 'title', 'subtitle', 'isPrivate', + 'description', 'publishTime' + ] + model_file_fields = ['name', 'size', 'creationDate'] def _is_retriable(self, e): return issubclass(type(e), ConnectionError) or \ @@ -456,7 +422,7 @@ def authenticate(self): config_data = self.read_config_file(config_data) elif self._is_help_or_version_command(api_command) or (len( sys.argv) > 2 and api_command.startswith( - self.command_prefixes_allowing_anonymous_access)): + self.command_prefixes_allowing_anonymous_access)): # Some API commands should be allowed without authentication. return else: @@ -464,7 +430,7 @@ def authenticate(self): ' {}. Or use the environment method. See setup' ' instructions at' ' https://github.com/Kaggle/kaggle-api/'.format( - self.config_file, self.config_dir)) + self.config_file, self.config_dir)) # Step 3: load into configuration! self._load_config(config_data) @@ -541,22 +507,6 @@ def _load_config(self, config_data): self.config_values = config_data - try: - self.api_client = ApiClient(configuration) - - except Exception as error: - - if 'Proxy' in type(error).__name__: - raise ValueError('The specified proxy ' + - config_data[self.CONFIG_NAME_PROXY] + - ' is not valid, please check your proxy settings') - else: - raise ValueError( - 'Unauthorized: you must download an API key or export ' - 'credentials to the environment. Please see\n ' + - 'https://github.com/Kaggle/kaggle-api#api-credentials ' + - 'for instructions.') - def read_config_file(self, config_data=None, quiet=False): """read_config_file is the first effort to get a username and key to authenticate to the Kaggle API. 
Since we can get the @@ -727,12 +677,12 @@ def build_kaggle_client(self): else KaggleEnv.LOCAL if '--local' in self.args \ else KaggleEnv.PROD verbose = '--verbose' in self.args or '-v' in self.args - config = self.api_client.configuration + # config = self.api_client.configuration return KaggleClient( env=env, verbose=verbose, - username=config.username, - password=config.password) + username=self.config_values['username'], + password=self.config_values['key']) def camel_to_snake(self, name): """ @@ -781,7 +731,7 @@ def competitions_list(self, raise ValueError('Invalid group specified. Valid options are ' + str(self.valid_competition_groups)) if group == 'all': - group = CompetitionListTab.COMPETITION_LIST_TAB_DEFAULT + group = CompetitionListTab.COMPETITION_LIST_TAB_EVERYTHING else: group = self.lookup_enum(CompetitionListTab, group) @@ -902,8 +852,8 @@ def competition_submit_cli(self, try: submit_result = self.competition_submit(file_name, message, competition, quiet) - except ApiException as e: - if e.status == 404: + except RequestException as e: + if e.response and e.response.status_code == 404: print('Could not find competition - please verify that you ' 'entered the correct competition ID and that the ' 'competition is still accepting submissions.') @@ -1068,7 +1018,8 @@ def competition_download_file(self, outfile = os.path.join(effective_path, url.split('?')[0].split('/')[-1]) if force or self.download_needed(response, outfile, quiet): - self.download_file(response, outfile, quiet, not force) + self.download_file(response, outfile, kaggle.http_client(), quiet, + not force) def competition_download_files(self, competition, @@ -1378,25 +1329,43 @@ def dataset_metadata_update(self, dataset, path): with open(meta_file, 'r') as f: s = json.load(f) metadata = json.loads(s) - updateSettingsRequest = DatasetUpdateSettingsRequest( - title=metadata.get('title') or '', - subtitle=metadata.get('subtitle') or '', - description=metadata.get('description') or '', - is_private=metadata.get('isPrivate') or False, - licenses=[License(name=l['name']) for l in metadata['licenses']] - if metadata.get('licenses') else [], - keywords=metadata.get('keywords'), - collaborators=[ - Collaborator(username=c['username'], role=c['role']) - for c in metadata['collaborators'] - ] if metadata.get('collaborators') else [], - data=metadata.get('data')) - result = self.process_response( - self.metadata_post_with_http_info(owner_slug, dataset_slug, - updateSettingsRequest)) - if len(result['errors']) > 0: - [print(e['message']) for e in result['errors']] - exit(1) + update_settings = DatasetSettings() + update_settings.title = metadata.get('title') or '' + update_settings.subtitle = metadata.get('subtitle') or '' + update_settings.description = metadata.get('description') or '' + update_settings.is_private = metadata.get('isPrivate') or False + update_settings.licenses = [ + self._new_license(l['name']) for l in metadata['licenses'] + ] if metadata.get('licenses') else [] + update_settings.keywords = metadata.get('keywords') + update_settings.collaborators = [ + self._new_collaborator(c['username'], c['role']) + for c in metadata['collaborators'] + ] if metadata.get('collaborators') else [] + update_settings.data = metadata.get('data') + request = ApiUpdateDatasetMetadataRequest() + request.owner_slug = owner_slug + request.dataset_slug = dataset_slug + request.settings = update_settings + with self.build_kaggle_client() as kaggle: + response = kaggle.datasets.dataset_api_client.update_dataset_metadata( + request) + 
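As a standalone illustration of the dataset-metadata path introduced above (a sketch, not part of the patch): the field names and the datasets.dataset_api_client.update_dataset_metadata call come from dataset_metadata_update(); the slugs and values are placeholders, and the ApiUpdateDatasetMetadataRequest import path is assumed to follow the other dataset_api_service imports.

from kagglesdk import KaggleClient, KaggleEnv
from kagglesdk.datasets.types.dataset_api_service import ApiUpdateDatasetMetadataRequest  # import path assumed
from kagglesdk.datasets.types.dataset_types import DatasetSettings, SettingsLicense

settings = DatasetSettings()
settings.title = 'My Dataset'
settings.subtitle = 'A short subtitle of at least twenty characters'  # 20-80 chars, per dataset_create_new
settings.is_private = True
lic = SettingsLicense()
lic.name = 'CC0-1.0'
settings.licenses = [lic]

request = ApiUpdateDatasetMetadataRequest()
request.owner_slug = '<owner>'      # placeholder
request.dataset_slug = '<dataset>'  # placeholder
request.settings = settings

with KaggleClient(env=KaggleEnv.PROD, verbose=False,
                  username='<username>', password='<api-key>') as kaggle:
    response = kaggle.datasets.dataset_api_client.update_dataset_metadata(request)
    # response.errors is checked by the CLI wrapper, as in dataset_metadata_update() above.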
if len(response.errors) > 0: + [print(e['message']) for e in response.errors] + exit(1) + + @staticmethod + def _new_license(name): + l = SettingsLicense() + l.name = name + return l + + @staticmethod + def _new_collaborator(name, role): + u = DatasetCollaborator() + u.username = name + u.role = role + return u def dataset_metadata(self, dataset, path): (owner_slug, dataset_slug, @@ -1457,7 +1426,7 @@ def dataset_list_files(self, dataset, page_token=None, page_size=20): request.page_token = page_token request.page_size = page_size response = kaggle.datasets.dataset_api_client.list_dataset_files(request) - return ListFilesResult(response) + return response def dataset_list_files_cli(self, dataset, @@ -1483,7 +1452,7 @@ def dataset_list_files_cli(self, if result.error_message: print(result.error_message) else: - next_page_token = result.nextPageToken + next_page_token = result.next_page_token if next_page_token: print('Next Page Token = {}'.format(next_page_token)) fields = ['name', 'size', 'creationDate'] @@ -1638,17 +1607,17 @@ def dataset_download_files(self, raise ValueError( f"The file {outfile} is corrupted or not a valid zip file. " "Please report this issue at https://www.github.com/kaggle/kaggle-api" - ) + ) except FileNotFoundError: raise FileNotFoundError( f"The file {outfile} was not found. " "Please report this issue at https://www.github.com/kaggle/kaggle-api" - ) + ) except Exception as e: raise RuntimeError( f"An unexpected error occurred: {e}. " "Please report this issue at https://www.github.com/kaggle/kaggle-api" - ) + ) try: os.remove(outfile) @@ -1700,14 +1669,14 @@ def dataset_download_cli(self, # license_objs format is like: [{ 'name': 'CC0-1.0' }] license_objs = metadata['info']['licenses'] licenses = [ - license_obj['name'] - for license_obj in license_objs - if 'name' in license_obj - ] + license_obj['name'] + for license_obj in license_objs + if 'name' in license_obj + ] else: licenses = [ - 'Error retrieving license. Please visit the Dataset URL to view license information.' - ] + 'Error retrieving license. Please visit the Dataset URL to view license information.' + ] if file_name is None: self.dataset_download_files( @@ -1740,11 +1709,11 @@ def _upload_blob(self, path, quiet, blob_type, upload_context): content_length = os.path.getsize(path) last_modified_epoch_seconds = int(os.path.getmtime(path)) - start_blob_upload_request = StartBlobUploadRequest( - blob_type, - file_name, - content_length, - last_modified_epoch_seconds=last_modified_epoch_seconds) + start_blob_upload_request = ApiStartBlobUploadRequest() + start_blob_upload_request.type = blob_type + start_blob_upload_request.name = file_name + start_blob_upload_request.content_length = content_length + start_blob_upload_request.last_modified_epoch_seconds = last_modified_epoch_seconds file_upload = upload_context.new_resumable_file_upload( path, start_blob_upload_request) @@ -1755,10 +1724,11 @@ def _upload_blob(self, path, quiet, blob_type, upload_context): if not file_upload.can_resume: # Initiate upload on Kaggle backend to get the url and token. 
- start_blob_upload_response = self.process_response( - self.with_retry(self.upload_file_with_http_info)( - file_upload.start_blob_upload_request)) - file_upload.upload_initiated(start_blob_upload_response) + with self.build_kaggle_client() as kaggle: + method = kaggle.blobs.blob_api_client.start_blob_upload + start_blob_upload_response = self.with_retry(method)( + file_upload.start_blob_upload_request) + file_upload.upload_initiated(start_blob_upload_response) upload_result = self.upload_complete( path, @@ -1805,7 +1775,7 @@ def dataset_create_version(self, if not ref and not id_no: raise ValueError('ID or slug must be specified in the metadata') elif ref and ref == self.config_values[ - self.CONFIG_NAME_USER] + '/INSERT_SLUG_HERE': + self.CONFIG_NAME_USER] + '/INSERT_SLUG_HERE': raise ValueError( 'Default slug detected, please change values before uploading') @@ -1846,8 +1816,8 @@ def dataset_create_version(self, self.upload_files(body, resources, folder, ApiBlobType.DATASET, upload_context, quiet, dir_mode) request.body.files = [ - self._api_dataset_new_file(file) for file in request.body.files - ] + self._api_dataset_new_file(file) for file in request.body.files + ] response = self.with_retry(message)(request) return response @@ -1978,20 +1948,19 @@ def dataset_create_new(self, if subtitle and (len(subtitle) < 20 or len(subtitle) > 80): raise ValueError('Subtitle length must be between 20 and 80 characters') - request = DatasetNewRequest( - title=title, - slug=dataset_slug, - owner_slug=owner_slug, - license_name=license_name, - subtitle=subtitle, - description=description, - files=[], - is_private=not public, - convert_to_csv=convert_to_csv, - category_ids=keywords) + request = ApiCreateDatasetRequest() + request.title = title + request.slug = dataset_slug + request.owner_slug = owner_slug + request.license_name = license_name + request.subtitle = subtitle + request.description = description + request.files = [] + request.is_private = not public + # request.convert_to_csv=convert_to_csv + request.category_ids = keywords with ResumableUploadContext() as upload_context: - # TODO Change upload_files() to use ApiCreateDatasetRequest self.upload_files(request, resources, folder, ApiBlobType.DATASET, upload_context, quiet, dir_mode) @@ -2004,13 +1973,13 @@ def dataset_create_new(self, retry_request.subtitle = subtitle retry_request.description = description retry_request.files = [ - self._api_dataset_new_file(file) for file in request.files - ] + self._api_dataset_new_file(file) for file in request.files + ] retry_request.is_private = not public retry_request.category_ids = keywords response = self.with_retry( kaggle.datasets.dataset_api_client.create_dataset)( - retry_request) + retry_request) return response def dataset_create_new_cli(self, @@ -2047,6 +2016,7 @@ def dataset_create_new_cli(self, def download_file(self, response, outfile, + http_client, quiet=True, resume=False, chunk_size=1048576): @@ -2056,6 +2026,7 @@ def download_file(self, ========== response: the response to download outfile: the output file to download to + http_client: the Kaggle http client to use quiet: suppress verbose output (default is True) chunk_size: the size of the chunk to stream resume: whether to resume an existing download @@ -2080,7 +2051,7 @@ def download_file(self, file_exists = os.path.isfile(outfile) resumable = 'Accept-Ranges' in response.headers and response.headers[ - 'Accept-Ranges'] == 'bytes' + 'Accept-Ranges'] == 'bytes' if resume and resumable and file_exists: size_read = 
os.path.getsize(outfile) @@ -2088,14 +2059,14 @@ def download_file(self, if not quiet: print("... resuming from %d bytes (%d bytes left) ..." % ( - size_read, - size - size_read, - )) - - request_history = response.retries.history[0] - response = self.api_client.request( - request_history.method, - request_history.redirect_location, + size_read, + size - size_read, + )) + + request_history = response.history[0] + response = http_client.call( + request_history.request.method, + request_history.headers['location'], headers={'Range': 'bytes=%d-' % (size_read,)}, _preload_content=False) @@ -2352,31 +2323,31 @@ def kernels_initialize(self, folder): username = self.get_config_value(self.CONFIG_NAME_USER) meta_data = { - 'id': - username + '/INSERT_KERNEL_SLUG_HERE', - 'title': - 'INSERT_TITLE_HERE', - 'code_file': - 'INSERT_CODE_FILE_PATH_HERE', - 'language': - 'Pick one of: {' + - ','.join(x for x in self.valid_push_language_types) + '}', - 'kernel_type': - 'Pick one of: {' + - ','.join(x for x in self.valid_push_kernel_types) + '}', - 'is_private': - 'true', - 'enable_gpu': - 'false', - 'enable_tpu': - 'false', - 'enable_internet': - 'true', - 'dataset_sources': [], - 'competition_sources': [], - 'kernel_sources': [], - 'model_sources': [], - } + 'id': + username + '/INSERT_KERNEL_SLUG_HERE', + 'title': + 'INSERT_TITLE_HERE', + 'code_file': + 'INSERT_CODE_FILE_PATH_HERE', + 'language': + 'Pick one of: {' + + ','.join(x for x in self.valid_push_language_types) + '}', + 'kernel_type': + 'Pick one of: {' + + ','.join(x for x in self.valid_push_kernel_types) + '}', + 'is_private': + 'true', + 'enable_gpu': + 'false', + 'enable_tpu': + 'false', + 'enable_internet': + 'true', + 'dataset_sources': [], + 'competition_sources': [], + 'kernel_sources': [], + 'model_sources': [], + } meta_file = os.path.join(folder, self.KERNEL_METADATA_FILE) with open(meta_file, 'w') as f: json.dump(meta_data, f, indent=2) @@ -2853,7 +2824,7 @@ def model_get_cli(self, model, folder=None): data['slug'] = model_ref_split[1] data['title'] = model.title data['subtitle'] = model.subtitle - data['isPrivate'] = model.isPrivate # TODO Add a test to ensure default is True + data['isPrivate'] = model.isPrivate # TODO Test to ensure True default data['description'] = model.description data['publishTime'] = model.publishTime @@ -2937,18 +2908,18 @@ def model_initialize(self, folder): raise ValueError('Invalid folder: ' + folder) meta_data = { - 'ownerSlug': - 'INSERT_OWNER_SLUG_HERE', - 'title': - 'INSERT_TITLE_HERE', - 'slug': - 'INSERT_SLUG_HERE', - 'subtitle': - '', - 'isPrivate': - True, - 'description': - '''# Model Summary + 'ownerSlug': + 'INSERT_OWNER_SLUG_HERE', + 'title': + 'INSERT_TITLE_HERE', + 'slug': + 'INSERT_SLUG_HERE', + 'subtitle': + '', + 'isPrivate': + True, + 'description': + '''# Model Summary # Model Characteristics @@ -2956,11 +2927,11 @@ def model_initialize(self, folder): # Evaluation Results ''', - 'publishTime': - '', - 'provenanceSources': - '' - } + 'publishTime': + '', + 'provenanceSources': + '' + } meta_file = os.path.join(folder, self.MODEL_METADATA_FILE) with open(meta_file, 'w') as f: json.dump(meta_data, f, indent=2) @@ -3202,20 +3173,20 @@ def model_instance_get_cli(self, model_instance, folder=None): model_instance) data = { - 'id': mi.id, - 'ownerSlug': owner_slug, - 'modelSlug': model_slug, - 'instanceSlug': mi.slug, - 'framework': self.short_enum_name(mi.framework), - 'overview': mi.overview, - 'usage': mi.usage, - 'licenseName': mi.license_name, - 'fineTunable': mi.fine_tunable, - 
'trainingData': mi.training_data, - 'versionId': mi.version_id, - 'versionNumber': mi.version_number, - 'modelInstanceType': self.short_enum_name(mi.model_instance_type) - } + 'id': mi.id, + 'ownerSlug': owner_slug, + 'modelSlug': model_slug, + 'instanceSlug': mi.slug, + 'framework': self.short_enum_name(mi.framework), + 'overview': mi.overview, + 'usage': mi.usage, + 'licenseName': mi.license_name, + 'fineTunable': mi.fine_tunable, + 'trainingData': mi.training_data, + 'versionId': mi.version_id, + 'versionNumber': mi.version_number, + 'modelInstanceType': self.short_enum_name(mi.model_instance_type) + } if mi.base_model_instance_information is not None: # TODO Test this. data['baseModelInstance'] = '{}/{}/{}/{}'.format( @@ -3239,18 +3210,18 @@ def model_instance_initialize(self, folder): raise ValueError('Invalid folder: ' + folder) meta_data = { - 'ownerSlug': - 'INSERT_OWNER_SLUG_HERE', - 'modelSlug': - 'INSERT_EXISTING_MODEL_SLUG_HERE', - 'instanceSlug': - 'INSERT_INSTANCE_SLUG_HERE', - 'framework': - 'INSERT_FRAMEWORK_HERE', - 'overview': - '', - 'usage': - '''# Model Format + 'ownerSlug': + 'INSERT_OWNER_SLUG_HERE', + 'modelSlug': + 'INSERT_EXISTING_MODEL_SLUG_HERE', + 'instanceSlug': + 'INSERT_INSTANCE_SLUG_HERE', + 'framework': + 'INSERT_FRAMEWORK_HERE', + 'overview': + '', + 'usage': + '''# Model Format # Training Data @@ -3264,18 +3235,18 @@ def model_instance_initialize(self, folder): # Changelog ''', - 'licenseName': - 'Apache 2.0', - 'fineTunable': - False, - 'trainingData': [], - 'modelInstanceType': - 'Unspecified', - 'baseModelInstanceId': - 0, - 'externalBaseModelUrl': - '' - } + 'licenseName': + 'Apache 2.0', + 'fineTunable': + False, + 'trainingData': [], + 'modelInstanceType': + 'Unspecified', + 'baseModelInstanceId': + 0, + 'externalBaseModelUrl': + '' + } meta_file = os.path.join(folder, self.MODEL_INSTANCE_METADATA_FILE) with open(meta_file, 'w') as f: json.dump(meta_data, f, indent=2) @@ -3330,7 +3301,7 @@ def model_instance_create(self, folder, quiet=False, dir_mode='skip'): if instance_slug == 'INSERT_INSTANCE_SLUG_HERE': raise ValueError( 'Default instanceSlug detected, please change values before uploading' - ) + ) if framework == 'INSERT_FRAMEWORK_HERE': raise ValueError( 'Default framework detected, please change values before uploading') @@ -3365,8 +3336,8 @@ def model_instance_create(self, folder, quiet=False, dir_mode='skip'): self.upload_files(body, None, folder, ApiBlobType.MODEL, upload_context, quiet, dir_mode) request.body.files = [ - self._api_dataset_new_file(file) for file in request.body.files - ] + self._api_dataset_new_file(file) for file in request.body.files + ] response = self.with_retry(message)(request) return response @@ -3538,7 +3509,7 @@ def model_instance_update(self, folder): if instance_slug == 'INSERT_INSTANCE_SLUG_HERE': raise ValueError( 'Default instance slug detected, please change values before uploading' - ) + ) if framework == 'INSERT_FRAMEWORK_HERE': raise ValueError( 'Default framework detected, please change values before uploading') @@ -3627,8 +3598,6 @@ def model_instance_version_create(self, owner_slug, model_slug, framework, instance_slug = self.split_model_instance_string( model_instance) - request = ModelInstanceNewVersionRequest( - version_notes=version_notes, files=[]) request = ApiCreateModelInstanceVersionRequest() request.owner_slug = owner_slug request.model_slug = model_slug @@ -3643,8 +3612,8 @@ def model_instance_version_create(self, self.upload_files(body, None, folder, ApiBlobType.MODEL, upload_context, 
quiet, dir_mode) request.body.files = [ - self._api_dataset_new_file(file) for file in request.body.files - ] + self._api_dataset_new_file(file) for file in request.body.files + ] response = self.with_retry(message)(request) return response @@ -3901,14 +3870,16 @@ def files_upload_cli(self, local_paths, inbox_path, no_resume, no_compress): if upload_file is None: continue - create_inbox_file_request = CreateInboxFileRequest( - virtual_directory=inbox_path, blob_file_token=upload_file.token) + create_inbox_file_request = CreateInboxFileRequest() + create_inbox_file_request.virtual_directory = inbox_path + create_inbox_file_request.blob_file_token = upload_file.token files_to_create.append((create_inbox_file_request, file_name)) - for (create_inbox_file_request, file_name) in files_to_create: - self.process_response( - self.with_retry(self.create_inbox_file)(create_inbox_file_request)) - print('Inbox file created:', file_name) + with self.build_kaggle_client() as kaggle: + create_inbox_file = kaggle.admin.inbox_file_client.create_inbox_file + for (create_inbox_file_request, file_name) in files_to_create: + self.with_retry(create_inbox_file)(create_inbox_file_request) + print('Inbox file created:', file_name) def file_upload_cli(self, local_path, inbox_path, no_compress, upload_context): @@ -3979,9 +3950,9 @@ def print_table(self, items, fields, labels=None): length = max( len(f), max([ - len(self.string(getattr(i, self.camel_to_snake(f)))) - for i in items - ])) + len(self.string(getattr(i, self.camel_to_snake(f)))) + for i in items + ])) justify = '>' if isinstance( getattr(items[0], self.camel_to_snake(f)), int) or f == 'size' or f == 'reward' else '<' @@ -3993,8 +3964,8 @@ def print_table(self, items, fields, labels=None): print(row_format.format(*borders)) for i in items: i_fields = [ - self.string(getattr(i, self.camel_to_snake(f))) + ' ' for f in fields - ] + self.string(getattr(i, self.camel_to_snake(f))) + ' ' for f in fields + ] try: print(row_format.format(*i_fields)) except UnicodeEncodeError: @@ -4015,8 +3986,8 @@ def print_csv(self, items, fields, labels=None): writer.writerow(labels) for i in items: i_fields = [ - self.string(getattr(i, self.camel_to_snake(f))) for f in fields - ] + self.string(getattr(i, self.camel_to_snake(f))) for f in fields + ] writer.writerow(i_fields) def string(self, item): @@ -4089,9 +4060,9 @@ def process_response(self, result): api_version = headers[self.HEADER_API_VERSION] if (not self.already_printed_version_warning and not self.is_up_to_date(api_version)): - print('Warning: Looks like you\'re using an outdated API ' - 'Version, please consider updating (server ' + api_version + - ' / client ' + self.__version__ + ')') + print(f'Warning: Looks like you\'re using an outdated `kaggle`` ' + 'version (installed: {self.__version__}, please consider ' + 'upgrading to the latest version ({api_version})') self.already_printed_version_warning = True if isinstance(data, dict) and 'code' in data and data['code'] != 200: raise Exception(data['message']) @@ -4150,10 +4121,10 @@ def upload_files(self, """ for file_name in os.listdir(folder): if (file_name in [ - self.DATASET_METADATA_FILE, self.OLD_DATASET_METADATA_FILE, - self.KERNEL_METADATA_FILE, self.MODEL_METADATA_FILE, - self.MODEL_INSTANCE_METADATA_FILE - ]): + self.DATASET_METADATA_FILE, self.OLD_DATASET_METADATA_FILE, + self.KERNEL_METADATA_FILE, self.MODEL_METADATA_FILE, + self.MODEL_INSTANCE_METADATA_FILE + ]): continue upload_file = self._upload_file_or_folder(folder, file_name, blob_type, 
upload_context, dir_mode, quiet, @@ -4293,11 +4264,11 @@ def upload_complete(self, path, url, quiet, resume=False): if start_at > 0: fp.seek(start_at) session.headers.update({ - 'Content-Length': - '%d' % upload_size, - 'Content-Range': - 'bytes %d-%d/%d' % (start_at, file_size - 1, file_size) - }) + 'Content-Length': + '%d' % upload_size, + 'Content-Range': + 'bytes %d-%d/%d' % (start_at, file_size - 1, file_size) + }) reader = TqdmBufferedReader(fp, progress_bar) retries = Retry(total=10, backoff_factor=0.5) adapter = HTTPAdapter(max_retries=retries) @@ -4320,9 +4291,9 @@ def _resume_upload(self, path, url, content_length, quiet): # Documentation: https://developers.google.com/drive/api/guides/manage-uploads#resume-upload session = requests.Session() session.headers.update({ - 'Content-Length': '0', - 'Content-Range': 'bytes */%d' % content_length, - }) + 'Content-Length': '0', + 'Content-Range': 'bytes */%d' % content_length, + }) response = session.put(url) @@ -4470,11 +4441,11 @@ def validate_model_instance_version_string(self, model_instance_version): raise ValueError( 'Model instance version must be specified in the form of ' '\'{owner}/{model-slug}/{framework}/{instance-slug}/{version-number}\'' - ) + ) split = model_instance_version.split('/') if not split[0] or not split[1] or not split[2] or not split[ - 3] or not split[4]: + 3] or not split[4]: raise ValueError('Invalid model instance version specification ' + model_instance_version) @@ -4516,7 +4487,7 @@ def validate_model_string(self, model): raise ValueError( 'Model must be specified in the form of ' '\'{username}/{model-slug}/{framework}/{variation-slug}/{version-number}\'' - ) + ) split = model.split('/') if not split[0] or not split[1]: @@ -4573,18 +4544,18 @@ def convert_to_dataset_file_metadata(self, file_data, path): path: the path to write the metadata to """ as_metadata = { - 'path': os.path.join(path, file_data['name']), - 'description': file_data['description'] - } + 'path': os.path.join(path, file_data['name']), + 'description': file_data['description'] + } schema = {} fields = [] for column in file_data['columns']: field = { - 'name': column['name'], - 'title': column['description'], - 'type': column['type'] - } + 'name': column['name'], + 'title': column['description'], + 'type': column['type'] + } fields.append(field) schema['fields'] = fields as_metadata['schema'] = schema diff --git a/kaggle/api_client.py b/kaggle/api_client.py deleted file mode 100644 index a13ef35..0000000 --- a/kaggle/api_client.py +++ /dev/null @@ -1,633 +0,0 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
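The swagger-generated kaggle/api_client.py removed below handled serialization and HTTP for every endpoint; after this patch each call goes through a kagglesdk service client instead. A minimal sketch (placeholders only, not part of the patch) of the inbox-file call that files_upload_cli now makes through the admin client:

from kagglesdk import KaggleClient, KaggleEnv
from kagglesdk.admin.types.inbox_file_service import CreateInboxFileRequest

request = CreateInboxFileRequest()
request.virtual_directory = 'my-upload-dir'            # placeholder inbox path
request.blob_file_token = '<token-from-blob-upload>'   # placeholder; normally the UploadFile token

with KaggleClient(env=KaggleEnv.PROD, verbose=False,
                  username='<username>', password='<api-key>') as kaggle:
    kaggle.admin.inbox_file_client.create_inbox_file(request)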
- -# coding: utf-8 -""" - Kaggle API - - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - -from __future__ import absolute_import - -import datetime -import json -import mimetypes -from multiprocessing.pool import ThreadPool -import os -import re -import tempfile - -# python 2 and python 3 compatibility library -import six -from six.moves.urllib.parse import quote - -from kaggle.configuration import Configuration -import kaggle.models -from kaggle import rest - - -class ApiClient(object): - """Generic API client for Swagger client library builds. - - Swagger generic API client. This client handles the client- - server communication, and is invariant across implementations. Specifics of - the methods and models for each application are generated from the Swagger - templates. - - NOTE: This class is auto generated by the swagger code generator program. - Ref: https://github.com/swagger-api/swagger-codegen - Do not edit the class manually. - - :param configuration: .Configuration object for this client - :param header_name: a header to pass when making calls to the API. - :param header_value: a header value to pass when making calls to - the API. - :param cookie: a cookie to include in the header when making calls - to the API - """ - - PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types - NATIVE_TYPES_MAPPING = { - 'int': int, - 'long': int if six.PY3 else long, # noqa: F821 - 'float': float, - 'str': str, - 'bool': bool, - 'date': datetime.date, - 'datetime': datetime.datetime, - 'object': object, - } - - def __init__(self, configuration=None, header_name=None, header_value=None, - cookie=None): - if configuration is None: - configuration = Configuration() - self.configuration = configuration - - self.pool = ThreadPool() - self.rest_client = rest.RESTClientObject(configuration) - self.default_headers = {} - if header_name is not None: - self.default_headers[header_name] = header_value - self.cookie = cookie - # Set default User-Agent. 
- self.user_agent = 'Swagger-Codegen/1/python' - - @property - def user_agent(self): - """User agent for this API client""" - return self.default_headers['User-Agent'] - - @user_agent.setter - def user_agent(self, value): - self.default_headers['User-Agent'] = value - - def set_default_header(self, header_name, header_value): - self.default_headers[header_name] = header_value - - def __call_api( - self, resource_path, method, path_params=None, - query_params=None, header_params=None, body=None, post_params=None, - files=None, response_type=None, auth_settings=None, - _return_http_data_only=None, collection_formats=None, - _preload_content=True, _request_timeout=None): - - config = self.configuration - - # header parameters - header_params = header_params or {} - header_params.update(self.default_headers) - if self.cookie: - header_params['Cookie'] = self.cookie - if header_params: - header_params = self.sanitize_for_serialization(header_params) - header_params = dict(self.parameters_to_tuples(header_params, - collection_formats)) - - # path parameters - if path_params: - path_params = self.sanitize_for_serialization(path_params) - path_params = self.parameters_to_tuples(path_params, - collection_formats) - for k, v in path_params: - # specified safe chars, encode everything - resource_path = resource_path.replace( - '{%s}' % k, - quote(str(v), safe=config.safe_chars_for_path_param) - ) - - # query parameters - if query_params: - query_params = self.sanitize_for_serialization(query_params) - query_params = self.parameters_to_tuples(query_params, - collection_formats) - - # post parameters - if post_params or files: - post_params = self.prepare_post_parameters(post_params, files) - post_params = self.sanitize_for_serialization(post_params) - post_params = self.parameters_to_tuples(post_params, - collection_formats) - - # auth setting - self.update_params_for_auth(header_params, query_params, auth_settings) - - # body - if body: - body = self.sanitize_for_serialization(body) - - # request url - url = self.configuration.host + resource_path - - # perform request and return response - response_data = self.request( - method, url, query_params=query_params, headers=header_params, - post_params=post_params, body=body, - _preload_content=_preload_content, - _request_timeout=_request_timeout) - - self.last_response = response_data - - return_data = response_data - if _preload_content: - # deserialize response data - if response_type: - return_data = self.deserialize(response_data, response_type) - else: - return_data = None - - if _return_http_data_only: - return (return_data) - else: - return (return_data, response_data.status, - response_data.getheaders()) - - def sanitize_for_serialization(self, obj): - """Builds a JSON POST object. - - If obj is None, return None. - If obj is str, int, long, float, bool, return directly. - If obj is datetime.datetime, datetime.date - convert to string in iso8601 format. - If obj is list, sanitize each element in the list. - If obj is dict, return the dict. - If obj is swagger model, return the properties dict. - - :param obj: The data to serialize. - :return: The serialized form of data. 
- """ - if obj is None: - return None - elif isinstance(obj, self.PRIMITIVE_TYPES): - return obj - elif isinstance(obj, list): - return [self.sanitize_for_serialization(sub_obj) - for sub_obj in obj] - elif isinstance(obj, tuple): - return tuple(self.sanitize_for_serialization(sub_obj) - for sub_obj in obj) - elif isinstance(obj, (datetime.datetime, datetime.date)): - return obj.isoformat() - - if isinstance(obj, dict): - obj_dict = obj - else: - # Convert model obj to dict except - # attributes `swagger_types`, `attribute_map` - # and attributes which value is not None. - # Convert attribute name to json key in - # model definition for request. - obj_dict = {obj.attribute_map[attr]: getattr(obj, attr) - for attr, _ in six.iteritems(obj.swagger_types) - if getattr(obj, attr) is not None} - - return {key: self.sanitize_for_serialization(val) - for key, val in six.iteritems(obj_dict)} - - def deserialize(self, response, response_type): - """Deserializes response into an object. - - :param response: RESTResponse object to be deserialized. - :param response_type: class literal for - deserialized object, or string of class name. - - :return: deserialized object. - """ - # handle file downloading - # save response body into a tmp file and return the instance - if response_type == "file": - return self.__deserialize_file(response) - - # fetch data from response object - try: - data = json.loads(response.data) - except ValueError: - data = response.data - - return self.__deserialize(data, response_type) - - def __deserialize(self, data, klass): - """Deserializes dict, list, str into an object. - - :param data: dict, list or str. - :param klass: class literal, or string of class name. - - :return: object. - """ - if data is None: - return None - - if type(klass) == str: - if klass.startswith('list['): - sub_kls = re.match('list\[(.*)\]', klass).group(1) - return [self.__deserialize(sub_data, sub_kls) - for sub_data in data] - - if klass.startswith('dict('): - sub_kls = re.match('dict\(([^,]*), (.*)\)', klass).group(2) - return {k: self.__deserialize(v, sub_kls) - for k, v in six.iteritems(data)} - - # convert str to class - if klass in self.NATIVE_TYPES_MAPPING: - klass = self.NATIVE_TYPES_MAPPING[klass] - else: - klass = getattr(kaggle.models, klass) - - if klass in self.PRIMITIVE_TYPES: - return self.__deserialize_primitive(data, klass) - elif klass == object: - return self.__deserialize_object(data) - elif klass == datetime.date: - return self.__deserialize_date(data) - elif klass == datetime.datetime: - return self.__deserialize_datatime(data) - else: - return self.__deserialize_model(data, klass) - - def call_api(self, resource_path, method, - path_params=None, query_params=None, header_params=None, - body=None, post_params=None, files=None, - response_type=None, auth_settings=None, async_req=None, - _return_http_data_only=None, collection_formats=None, - _preload_content=True, _request_timeout=None): - """Makes the HTTP request (synchronous) and returns deserialized data. - - To make an async request, set the async_req parameter. - - :param resource_path: Path to method endpoint. - :param method: Method to call. - :param path_params: Path parameters in the url. - :param query_params: Query parameters in the url. - :param header_params: Header parameters to be - placed in the request header. - :param body: Request body. - :param post_params dict: Request post form parameters, - for `application/x-www-form-urlencoded`, `multipart/form-data`. 
- :param auth_settings list: Auth Settings names for the request. - :param response: Response data type. - :param files dict: key -> filename, value -> filepath, - for `multipart/form-data`. - :param async_req bool: execute request asynchronously - :param _return_http_data_only: response data without head status code - and headers - :param collection_formats: dict of collection formats for path, query, - header, and post parameters. - :param _preload_content: if False, the urllib3.HTTPResponse object will - be returned without reading/decoding response - data. Default is True. - :param _request_timeout: timeout setting for this request. If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - :return: - If async_req parameter is True, - the request will be called asynchronously. - The method will return the request thread. - If parameter async_req is False or missing, - then the method will return the response directly. - """ - if not async_req: - return self.__call_api(resource_path, method, - path_params, query_params, header_params, - body, post_params, files, - response_type, auth_settings, - _return_http_data_only, collection_formats, - _preload_content, _request_timeout) - else: - thread = self.pool.apply_async(self.__call_api, (resource_path, - method, path_params, query_params, - header_params, body, - post_params, files, - response_type, auth_settings, - _return_http_data_only, - collection_formats, - _preload_content, _request_timeout)) - return thread - - def request(self, method, url, query_params=None, headers=None, - post_params=None, body=None, _preload_content=True, - _request_timeout=None): - """Makes the HTTP request using RESTClient.""" - if method == "GET": - return self.rest_client.GET(url, - query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - headers=headers) - elif method == "HEAD": - return self.rest_client.HEAD(url, - query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - headers=headers) - elif method == "OPTIONS": - return self.rest_client.OPTIONS(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - elif method == "POST": - return self.rest_client.POST(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - elif method == "PUT": - return self.rest_client.PUT(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - elif method == "PATCH": - return self.rest_client.PATCH(url, - query_params=query_params, - headers=headers, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - elif method == "DELETE": - return self.rest_client.DELETE(url, - query_params=query_params, - headers=headers, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - else: - raise ValueError( - "http method must be `GET`, `HEAD`, `OPTIONS`," - " `POST`, `PATCH`, `PUT` or `DELETE`." - ) - - def parameters_to_tuples(self, params, collection_formats): - """Get parameters as list of tuples, formatting collections. 
- - :param params: Parameters as dict or list of two-tuples - :param dict collection_formats: Parameter collection formats - :return: Parameters as list of tuples, collections formatted - """ - new_params = [] - if collection_formats is None: - collection_formats = {} - for k, v in six.iteritems(params) if isinstance(params, dict) else params: # noqa: E501 - if k in collection_formats: - collection_format = collection_formats[k] - if collection_format == 'multi': - new_params.extend((k, value) for value in v) - else: - if collection_format == 'ssv': - delimiter = ' ' - elif collection_format == 'tsv': - delimiter = '\t' - elif collection_format == 'pipes': - delimiter = '|' - else: # csv is the default - delimiter = ',' - new_params.append( - (k, delimiter.join(str(value) for value in v))) - else: - new_params.append((k, v)) - return new_params - - def prepare_post_parameters(self, post_params=None, files=None): - """Builds form parameters. - - :param post_params: Normal form parameters. - :param files: File parameters. - :return: Form parameters with files. - """ - params = [] - - if post_params: - params = post_params - - if files: - for k, v in six.iteritems(files): - if not v: - continue - file_names = v if type(v) is list else [v] - for n in file_names: - with open(n, 'rb') as f: - filename = os.path.basename(f.name) - filedata = f.read() - mimetype = (mimetypes.guess_type(filename)[0] or - 'application/octet-stream') - params.append( - tuple([k, tuple([filename, filedata, mimetype])])) - - return params - - def select_header_accept(self, accepts): - """Returns `Accept` based on an array of accepts provided. - - :param accepts: List of headers. - :return: Accept (e.g. application/json). - """ - if not accepts: - return - - accepts = [x.lower() for x in accepts] - - if 'application/json' in accepts: - return 'application/json' - else: - return ', '.join(accepts) - - def select_header_content_type(self, content_types): - """Returns `Content-Type` based on an array of content_types provided. - - :param content_types: List of content-types. - :return: Content-Type (e.g. application/json). - """ - if not content_types: - return 'application/json' - - content_types = [x.lower() for x in content_types] - - if 'application/json' in content_types or '*/*' in content_types: - return 'application/json' - else: - return content_types[0] - - def update_params_for_auth(self, headers, querys, auth_settings): - """Updates header and query params based on authentication setting. - - :param headers: Header parameters dict to be updated. - :param querys: Query parameters tuple list to be updated. - :param auth_settings: Authentication setting identifiers list. - """ - if not auth_settings: - return - - for auth in auth_settings: - auth_setting = self.configuration.auth_settings().get(auth) - if auth_setting: - if not auth_setting['value']: - continue - elif auth_setting['in'] == 'header': - headers[auth_setting['key']] = auth_setting['value'] - elif auth_setting['in'] == 'query': - querys.append((auth_setting['key'], auth_setting['value'])) - else: - raise ValueError( - 'Authentication token must be in `query` or `header`' - ) - - def __deserialize_file(self, response): - """Deserializes body to file - - Saves response body into a file in a temporary folder, - using the filename from the `Content-Disposition` header if provided. - - :param response: RESTResponse. - :return: file path. 
- """ - fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path) - os.close(fd) - os.remove(path) - - content_disposition = response.getheader("Content-Disposition") - if content_disposition: - filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?', - content_disposition).group(1) - path = os.path.join(os.path.dirname(path), filename) - - with open(path, "wb") as f: - f.write(response.data) - - return path - - def __deserialize_primitive(self, data, klass): - """Deserializes string to primitive type. - - :param data: str. - :param klass: class literal. - - :return: int, long, float, str, bool. - """ - try: - return klass(data) - except UnicodeEncodeError: - return six.text_type(data) - except TypeError: - return data - - def __deserialize_object(self, value): - """Return a original value. - - :return: object. - """ - return value - - def __deserialize_date(self, string): - """Deserializes string to date. - - :param string: str. - :return: date. - """ - try: - from dateutil.parser import parse - return parse(string).date() - except ImportError: - return string - except ValueError: - raise rest.ApiException( - status=0, - reason="Failed to parse `{0}` as date object".format(string) - ) - - def __deserialize_datatime(self, string): - """Deserializes string to datetime. - - The string should be in iso8601 datetime format. - - :param string: str. - :return: datetime. - """ - try: - from dateutil.parser import parse - return parse(string) - except ImportError: - return string - except ValueError: - raise rest.ApiException( - status=0, - reason=( - "Failed to parse `{0}` as datetime object" - .format(string) - ) - ) - - def __deserialize_model(self, data, klass): - """Deserializes list or dict to model. - - :param data: dict, list. - :param klass: class literal. - :return: model object. - """ - - if not klass.swagger_types and not hasattr(klass, - 'get_real_child_model'): - return data - - kwargs = {} - if klass.swagger_types is not None: - for attr, attr_type in six.iteritems(klass.swagger_types): - if (data is not None and - klass.attribute_map[attr] in data and - isinstance(data, (list, dict))): - value = data[klass.attribute_map[attr]] - kwargs[attr] = self.__deserialize(value, attr_type) - - instance = klass(**kwargs) - - if hasattr(instance, 'get_real_child_model'): - klass_name = instance.get_real_child_model(data) - if klass_name: - instance = self.__deserialize(data, klass_name) - return instance diff --git a/kaggle/cli.py b/kaggle/cli.py index 3995d7c..ac927f6 100644 --- a/kaggle/cli.py +++ b/kaggle/cli.py @@ -1,19 +1,3 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- #!/usr/bin/python # # Copyright 2019 Kaggle Inc @@ -40,7 +24,8 @@ from kaggle import KaggleApi from kaggle import api -from .rest import ApiException +# from rest import ApiException +ApiException = IOError def main(): diff --git a/kaggle/configuration.py b/kaggle/configuration.py index 4308e17..356cc00 100644 --- a/kaggle/configuration.py +++ b/kaggle/configuration.py @@ -14,261 +14,195 @@ # See the License for the specific language governing permissions and # limitations under the License. -# coding: utf-8 - -""" - Kaggle API - - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - - -from __future__ import absolute_import - -import copy -import logging -import multiprocessing -import sys -import urllib3 - -import six -from six.moves import http_client as httplib - - -class TypeWithDefault(type): - def __init__(cls, name, bases, dct): - super(TypeWithDefault, cls).__init__(name, bases, dct) - cls._default = None - - def __call__(cls): - if cls._default is None: - cls._default = type.__call__(cls) - return copy.copy(cls._default) - - def set_default(cls, default): - cls._default = copy.copy(default) - - -class Configuration(six.with_metaclass(TypeWithDefault, object)): - """NOTE: This class is auto generated by the swagger code generator program. - - Ref: https://github.com/swagger-api/swagger-codegen - Do not edit the class manually. - """ - - def __init__(self): - """Constructor""" - # Default Base url - self.host = _get_endpoint_from_env() or "https://www.kaggle.com/api/v1" - # Temp file folder for downloading files - self.temp_folder_path = None - - # Authentication Settings - # dict to store API key(s) - self.api_key = {} - # dict to store API prefix (e.g. Bearer) - self.api_key_prefix = {} - # Username for HTTP basic authentication - self.username = "" - # Password for HTTP basic authentication - self.password = "" - - # Logging Settings - self.logger = {} - self.logger["package_logger"] = logging.getLogger("kaggle") - self.logger["urllib3_logger"] = logging.getLogger("urllib3") - # Log format - self.logger_format = '%(asctime)s %(levelname)s %(message)s' - # Log stream handler - self.logger_stream_handler = None - # Log file handler - self.logger_file_handler = None - # Debug file location - self.logger_file = None - # Debug switch - self.debug = False - - # SSL/TLS verification - # Set this to false to skip verifying SSL certificate when calling API - # from https server. - self.verify_ssl = True - # Set this to customize the certificate file to verify the peer. - self.ssl_ca_cert = None - # client certificate file - self.cert_file = None - # client key file - self.key_file = None - # Set this to True/False to enable/disable SSL hostname verification. - self.assert_hostname = None - - # urllib3 connection pool's maximum number of connections saved - # per pool. urllib3 uses 1 connection as default value, but this is - # not the best value when you are making a lot of possibly parallel - # requests to the same host, which is often the case here. - # cpu_count * 5 is used as default value to increase performance. - self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 - - # Proxy URL - self.proxy = None - # Safe chars for path_param - self.safe_chars_for_path_param = '' - - @property - def logger_file(self): - """The logger file. - - If the logger_file is None, then add stream handler and remove file - handler. Otherwise, add file handler and remove stream handler. 
- - :param value: The logger_file path. - :type: str - """ - return self.__logger_file - - @logger_file.setter - def logger_file(self, value): - """The logger file. - - If the logger_file is None, then add stream handler and remove file - handler. Otherwise, add file handler and remove stream handler. - - :param value: The logger_file path. - :type: str - """ - self.__logger_file = value - if self.__logger_file: - # If set logging file, - # then add file handler and remove stream handler. - self.logger_file_handler = logging.FileHandler(self.__logger_file) - self.logger_file_handler.setFormatter(self.logger_formatter) - for _, logger in six.iteritems(self.logger): - logger.addHandler(self.logger_file_handler) - if self.logger_stream_handler: - logger.removeHandler(self.logger_stream_handler) - else: - # If not set logging file, - # then add stream handler and remove file handler. - self.logger_stream_handler = logging.StreamHandler() - self.logger_stream_handler.setFormatter(self.logger_formatter) - for _, logger in six.iteritems(self.logger): - logger.addHandler(self.logger_stream_handler) - if self.logger_file_handler: - logger.removeHandler(self.logger_file_handler) - - @property - def debug(self): - """Debug status - - :param value: The debug status, True or False. - :type: bool - """ - return self.__debug - - @debug.setter - def debug(self, value): - """Debug status - - :param value: The debug status, True or False. - :type: bool - """ - self.__debug = value - if self.__debug: - # if debug status is True, turn on debug logging - for _, logger in six.iteritems(self.logger): - logger.setLevel(logging.DEBUG) - # turn on httplib debug - httplib.HTTPConnection.debuglevel = 1 - else: - # if debug status is False, turn off debug logging, - # setting log level to default `logging.WARNING` - for _, logger in six.iteritems(self.logger): - logger.setLevel(logging.WARNING) - # turn off httplib debug - httplib.HTTPConnection.debuglevel = 0 - - @property - def logger_format(self): - """The logger format. - - The logger_formatter will be updated when sets logger_format. - - :param value: The format string. - :type: str - """ - return self.__logger_format - - @logger_format.setter - def logger_format(self, value): - """The logger format. - - The logger_formatter will be updated when sets logger_format. - - :param value: The format string. - :type: str - """ - self.__logger_format = value - self.logger_formatter = logging.Formatter(self.__logger_format) - - def get_api_key_with_prefix(self, identifier): - """Gets API key (with prefix if set). - - :param identifier: The identifier of apiKey. - :return: The token for api key authentication. - """ - if (self.api_key.get(identifier) and - self.api_key_prefix.get(identifier)): - return self.api_key_prefix[identifier] + ' ' + self.api_key[identifier] # noqa: E501 - elif self.api_key.get(identifier): - return self.api_key[identifier] - - def get_basic_auth_token(self): - """Gets HTTP basic authentication header (string). - - :return: The token for basic HTTP authentication. - """ - return urllib3.util.make_headers( - basic_auth=self.username + ':' + self.password - ).get('authorization') - - def auth_settings(self): - """Gets Auth Settings dict for api client. - - :return: The Auth Settings information dict. - """ - return { - 'basicAuth': - { - 'type': 'basic', - 'in': 'header', - 'key': 'Authorization', - 'value': self.get_basic_auth_token() - }, - - } - - def to_debug_report(self): - """Gets the essential information for debugging. 
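# Both the removed Configuration above and the re-added one later in this patch
# build the HTTP Basic auth header with urllib3.util.make_headers.  A small usage
# sketch; the username and key values are placeholders:

import urllib3

headers = urllib3.util.make_headers(basic_auth="someuser:some_api_key")
authorization = headers.get("authorization")   # "Basic <base64 of someuser:some_api_key>"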
- - :return: The report for debugging. - """ - return "Python SDK Debug Report:\n"\ - "OS: {env}\n"\ - "Python Version: {pyversion}\n"\ - "Version of the API: 1\n"\ - "SDK Package Version: 1".\ - format(env=sys.platform, pyversion=sys.version) +# coding: utf-8 +from __future__ import absolute_import + +import logging +import six +import sys +import urllib3 + + +class Configuration: + + def __init__(self): + """Constructor""" + # Default Base url + self.host = _get_endpoint_from_env() or "https://www.kaggle.com/api/v1" + + # Authentication Settings + # dict to store API key(s) + self.api_key = {} + # dict to store API prefix (e.g. Bearer) + self.api_key_prefix = {} + # Username for HTTP basic authentication + self.username = "" + # Password for HTTP basic authentication + self.password = "" + + # Logging Settings + self.logger = {"package_logger": logging.getLogger("kaggle"), + "urllib3_logger": logging.getLogger("urllib3")} + # Log format + self.logger_format = '%(asctime)s %(levelname)s %(message)s' + # Log stream handler + self.logger_stream_handler = None + # Log file handler + self.logger_file_handler = None + # Debug file location + self.logger_file = None + # Debug switch + self.debug = False + + + @property + def logger_file(self): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + return self.__logger_file + + @logger_file.setter + def logger_file(self, value): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + self.__logger_file = value + if self.__logger_file: + # If set logging file, + # then add file handler and remove stream handler. + self.logger_file_handler = logging.FileHandler(self.__logger_file) + self.logger_file_handler.setFormatter(self.logger_formatter) + for _, logger in six.iteritems(self.logger): + logger.addHandler(self.logger_file_handler) + if self.logger_stream_handler: + logger.removeHandler(self.logger_stream_handler) + else: + # If not set logging file, + # then add stream handler and remove file handler. + self.logger_stream_handler = logging.StreamHandler() + self.logger_stream_handler.setFormatter(self.logger_formatter) + for _, logger in six.iteritems(self.logger): + logger.addHandler(self.logger_stream_handler) + if self.logger_file_handler: + logger.removeHandler(self.logger_file_handler) + + @property + def debug(self): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + return self.__debug + + @debug.setter + def debug(self, value): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + self.__debug = value + if self.__debug: + # if debug status is True, turn on debug logging + for _, logger in six.iteritems(self.logger): + logger.setLevel(logging.DEBUG) + else: + # if debug status is False, turn off debug logging, + # setting log level to default `logging.WARNING` + for _, logger in six.iteritems(self.logger): + logger.setLevel(logging.WARNING) + + @property + def logger_format(self): + """The logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. + :type: str + """ + return self.__logger_format + + @logger_format.setter + def logger_format(self, value): + """The logger format. 
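# A brief usage sketch of the logging knobs on the new Configuration class defined
# above, assuming the class is already in scope; the log path is a placeholder:

config = Configuration()
config.debug = True                     # raises the kaggle/urllib3 loggers to DEBUG
config.logger_file = "/tmp/kaggle.log"  # swaps the stream handler for a FileHandler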
+ + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. + :type: str + """ + self.__logger_format = value + self.logger_formatter = logging.Formatter(self.__logger_format) + + def get_api_key_with_prefix(self, identifier): + """Gets API key (with prefix if set). + + :param identifier: The identifier of apiKey. + :return: The token for api key authentication. + """ + if (self.api_key.get(identifier) and + self.api_key_prefix.get(identifier)): + return self.api_key_prefix[identifier] + ' ' + self.api_key[identifier] # noqa: E501 + elif self.api_key.get(identifier): + return self.api_key[identifier] + + def get_basic_auth_token(self): + """Gets HTTP basic authentication header (string). + + :return: The token for basic HTTP authentication. + """ + return urllib3.util.make_headers( + basic_auth=self.username + ':' + self.password + ).get('authorization') + + def auth_settings(self): + """Gets Auth Settings dict for api client. + + :return: The Auth Settings information dict. + """ + return { + 'basicAuth': + { + 'type': 'basic', + 'in': 'header', + 'key': 'Authorization', + 'value': self.get_basic_auth_token() + }, + + } + + def to_debug_report(self): + """Gets the essential information for debugging. + + :return: The report for debugging. + """ + return "Python SDK Debug Report:\n" \ + "OS: {env}\n" \ + "Python Version: {pyversion}\n" \ + "Version of the API: 1\n" \ + "SDK Package Version: 1". \ + format(env=sys.platform, pyversion=sys.version) def _get_endpoint_from_env(): - import os - endpoint = os.environ.get("KAGGLE_API_ENDPOINT") - if endpoint is None: - return None - endpoint = endpoint.rstrip("/") - if endpoint.endswith("/api/v1"): - return endpoint - return endpoint + "/api/v1" + import os + endpoint = os.environ.get("KAGGLE_API_ENDPOINT") + if endpoint is None: + return None + endpoint = endpoint.rstrip("/") + if endpoint.endswith("/api/v1"): + return endpoint + return endpoint + "/api/v1" diff --git a/kaggle/models/__init__.py b/kaggle/models/__init__.py deleted file mode 100644 index 999007e..0000000 --- a/kaggle/models/__init__.py +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
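# _get_endpoint_from_env above normalises KAGGLE_API_ENDPOINT so that
# Configuration.host always ends in /api/v1.  A worked example of that rule; the
# endpoint value is a placeholder:

endpoint = "http://localhost:8080/".rstrip("/")   # -> "http://localhost:8080"
if not endpoint.endswith("/api/v1"):
    endpoint += "/api/v1"                         # -> "http://localhost:8080/api/v1"
# When KAGGLE_API_ENDPOINT is unset, Configuration.host falls back to
# "https://www.kaggle.com/api/v1".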
- -# coding: utf-8 - -# flake8: noqa -""" - Kaggle API - - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - - -from __future__ import absolute_import - -# import models into model package -from kaggle.models.api_blob_type import ApiBlobType -from kaggle.models.collaborator import Collaborator -from kaggle.models.create_inbox_file_request import CreateInboxFileRequest -from kaggle.models.dataset_column import DatasetColumn -from kaggle.models.dataset_new_request import DatasetNewRequest -from kaggle.models.dataset_new_version_request import DatasetNewVersionRequest -from kaggle.models.dataset_update_settings_request import DatasetUpdateSettingsRequest -from kaggle.models.error import Error -from kaggle.models.kernel_push_request import KernelPushRequest -from kaggle.models.license import License -from kaggle.models.model_instance_new_version_request import ModelInstanceNewVersionRequest -from kaggle.models.model_instance_update_request import ModelInstanceUpdateRequest -from kaggle.models.model_new_instance_request import ModelNewInstanceRequest -from kaggle.models.model_new_request import ModelNewRequest -from kaggle.models.model_update_request import ModelUpdateRequest -from kaggle.models.result import Result -from kaggle.models.start_blob_upload_request import StartBlobUploadRequest -from kaggle.models.start_blob_upload_response import StartBlobUploadResponse -from kaggle.models.upload_file import UploadFile diff --git a/kaggle/models/api_blob_type.py b/kaggle/models/api_blob_type.py index dc6e44e..1c4c95e 100644 --- a/kaggle/models/api_blob_type.py +++ b/kaggle/models/api_blob_type.py @@ -1,107 +1,4 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -""" - Kaggle API - - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - - -import pprint -import re # noqa: F401 - -import six - - -class ApiBlobType(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - - """ - allowed enum values - """ - DATASET = "dataset" - MODEL = "model" - INBOX = "inbox" - - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - } - - attribute_map = { - } - - def __init__(self): # noqa: E501 - """ApiBlobType - a model defined in Swagger""" # noqa: E501 - self.discriminator = None - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ApiBlobType): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other +class ApiBlobType(object): + DATASET = "dataset" + MODEL = "model" + INBOX = "inbox" diff --git a/kaggle/models/create_inbox_file_request.py b/kaggle/models/create_inbox_file_request.py deleted file mode 100644 index cc40603..0000000 --- a/kaggle/models/create_inbox_file_request.py +++ /dev/null @@ -1,160 +0,0 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -""" - Kaggle API - - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - - -import pprint -import re # noqa: F401 - -import six - - -class CreateInboxFileRequest(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'virtual_directory': 'str', - 'blob_file_token': 'str' - } - - attribute_map = { - 'virtual_directory': 'virtualDirectory', - 'blob_file_token': 'blobFileToken' - } - - def __init__(self, virtual_directory=None, blob_file_token=None): # noqa: E501 - """CreateInboxFileRequest - a model defined in Swagger""" # noqa: E501 - - self._virtual_directory = None - self._blob_file_token = None - self.discriminator = None - - self.virtual_directory = virtual_directory - self.blob_file_token = blob_file_token - - @property - def virtual_directory(self): - """Gets the virtual_directory of this CreateInboxFileRequest. # noqa: E501 - - Directory name used for tagging the uploaded file # noqa: E501 - - :return: The virtual_directory of this CreateInboxFileRequest. 
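# With the swagger machinery gone, the replacement ApiBlobType above is just a
# holder of string constants; there is nothing to instantiate.  Assuming the class
# is in scope:

assert ApiBlobType.DATASET == "dataset"
assert ApiBlobType.MODEL == "model"
assert ApiBlobType.INBOX == "inbox"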
# noqa: E501 - :rtype: str - """ - return self._virtual_directory - - @virtual_directory.setter - def virtual_directory(self, virtual_directory): - """Sets the virtual_directory of this CreateInboxFileRequest. - - Directory name used for tagging the uploaded file # noqa: E501 - - :param virtual_directory: The virtual_directory of this CreateInboxFileRequest. # noqa: E501 - :type: str - """ - if virtual_directory is None: - raise ValueError("Invalid value for `virtual_directory`, must not be `None`") # noqa: E501 - - self._virtual_directory = virtual_directory - - @property - def blob_file_token(self): - """Gets the blob_file_token of this CreateInboxFileRequest. # noqa: E501 - - Token representing the uploaded file # noqa: E501 - - :return: The blob_file_token of this CreateInboxFileRequest. # noqa: E501 - :rtype: str - """ - return self._blob_file_token - - @blob_file_token.setter - def blob_file_token(self, blob_file_token): - """Sets the blob_file_token of this CreateInboxFileRequest. - - Token representing the uploaded file # noqa: E501 - - :param blob_file_token: The blob_file_token of this CreateInboxFileRequest. # noqa: E501 - :type: str - """ - if blob_file_token is None: - raise ValueError("Invalid value for `blob_file_token`, must not be `None`") # noqa: E501 - - self._blob_file_token = blob_file_token - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, CreateInboxFileRequest): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/kaggle/models/dataset_column.py b/kaggle/models/dataset_column.py index 72008d2..5e89b3f 100644 --- a/kaggle/models/dataset_column.py +++ b/kaggle/models/dataset_column.py @@ -1,30 +1,20 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -""" - Kaggle API +#!/usr/bin/python +# +# Copyright 2024 Kaggle Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
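# Every generated model in this patch shares the to_dict() shape seen above: walk
# the declared attribute types, recurse into nested models through their own
# to_dict(), and copy plain values through.  A condensed standalone sketch of that
# pattern; the class and attribute names are invented for illustration:

import six

class ModelSketch(object):
    types = {"name": "str", "children": "list[ModelSketch]"}

    def __init__(self, name=None, children=None):
        self.name = name
        self.children = children or []

    def to_dict(self):
        result = {}
        for attr, _ in six.iteritems(self.types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [v.to_dict() if hasattr(v, "to_dict") else v for v in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            else:
                result[attr] = value
        return result

# ModelSketch("root", [ModelSketch("leaf")]).to_dict()
# -> {'name': 'root', 'children': [{'name': 'leaf', 'children': []}]}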
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" +# coding: utf-8 import pprint @@ -34,19 +24,14 @@ class DatasetColumn(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ Attributes: - swagger_types (dict): The key is attribute name + column_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ - swagger_types = { + column_types = { 'order': 'float', 'name': 'str', 'type': 'str', @@ -202,7 +187,7 @@ def to_dict(self): """Returns the model properties as a dict""" result = {} - for attr, _ in six.iteritems(self.swagger_types): + for attr, _ in six.iteritems(self.column_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( diff --git a/kaggle/models/dataset_new_request.py b/kaggle/models/dataset_new_request.py deleted file mode 100644 index 175a542..0000000 --- a/kaggle/models/dataset_new_request.py +++ /dev/null @@ -1,392 +0,0 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -""" - Kaggle API - - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - - -import pprint -import re # noqa: F401 - -import six - -from kaggle.models.upload_file import UploadFile # noqa: F401,E501 - - -class DatasetNewRequest(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'title': 'str', - 'slug': 'str', - 'owner_slug': 'str', - 'license_name': 'str', - 'subtitle': 'str', - 'description': 'str', - 'files': 'list[UploadFile]', - 'is_private': 'bool', - 'convert_to_csv': 'bool', - 'category_ids': 'list[str]' - } - - attribute_map = { - 'title': 'title', - 'slug': 'slug', - 'owner_slug': 'ownerSlug', - 'license_name': 'licenseName', - 'subtitle': 'subtitle', - 'description': 'description', - 'files': 'files', - 'is_private': 'isPrivate', - 'convert_to_csv': 'convertToCsv', - 'category_ids': 'categoryIds' - } - - def __init__(self, title=None, slug=None, owner_slug=None, license_name='unknown', subtitle=None, description='', files=None, is_private=True, convert_to_csv=True, category_ids=None): # noqa: E501 - """DatasetNewRequest - a model defined in Swagger""" # noqa: E501 - - self._title = None - self._slug = None - self._owner_slug = None - self._license_name = None - self._subtitle = None - self._description = None - self._files = None - self._is_private = None - self._convert_to_csv = None - self._category_ids = None - self.discriminator = None - - self.title = title - if slug is not None: - self.slug = slug - if owner_slug is not None: - self.owner_slug = owner_slug - if license_name is not None: - self.license_name = license_name - if subtitle is not None: - self.subtitle = subtitle - if description is not None: - self.description = description - self.files = files - if is_private is not None: - self.is_private = is_private - if convert_to_csv is not None: - self.convert_to_csv = convert_to_csv - if category_ids is not None: - self.category_ids = category_ids - - @property - def title(self): - """Gets the title of this DatasetNewRequest. # noqa: E501 - - The title of the new dataset # noqa: E501 - - :return: The title of this DatasetNewRequest. # noqa: E501 - :rtype: str - """ - return self._title - - @title.setter - def title(self, title): - """Sets the title of this DatasetNewRequest. - - The title of the new dataset # noqa: E501 - - :param title: The title of this DatasetNewRequest. # noqa: E501 - :type: str - """ - if title is None: - raise ValueError("Invalid value for `title`, must not be `None`") # noqa: E501 - - self._title = title - - @property - def slug(self): - """Gets the slug of this DatasetNewRequest. # noqa: E501 - - The slug that the dataset should be created with # noqa: E501 - - :return: The slug of this DatasetNewRequest. # noqa: E501 - :rtype: str - """ - return self._slug - - @slug.setter - def slug(self, slug): - """Sets the slug of this DatasetNewRequest. - - The slug that the dataset should be created with # noqa: E501 - - :param slug: The slug of this DatasetNewRequest. # noqa: E501 - :type: str - """ - - self._slug = slug - - @property - def owner_slug(self): - """Gets the owner_slug of this DatasetNewRequest. # noqa: E501 - - The owner's username # noqa: E501 - - :return: The owner_slug of this DatasetNewRequest. # noqa: E501 - :rtype: str - """ - return self._owner_slug - - @owner_slug.setter - def owner_slug(self, owner_slug): - """Sets the owner_slug of this DatasetNewRequest. - - The owner's username # noqa: E501 - - :param owner_slug: The owner_slug of this DatasetNewRequest. # noqa: E501 - :type: str - """ - - self._owner_slug = owner_slug - - @property - def license_name(self): - """Gets the license_name of this DatasetNewRequest. # noqa: E501 - - The license that should be associated with the dataset # noqa: E501 - - :return: The license_name of this DatasetNewRequest. 
# noqa: E501 - :rtype: str - """ - return self._license_name - - @license_name.setter - def license_name(self, license_name): - """Sets the license_name of this DatasetNewRequest. - - The license that should be associated with the dataset # noqa: E501 - - :param license_name: The license_name of this DatasetNewRequest. # noqa: E501 - :type: str - """ - allowed_values = ["CC0-1.0", "CC-BY-SA-4.0", "GPL-2.0", "ODbL-1.0", "CC-BY-NC-SA-4.0", "unknown", "DbCL-1.0", "CC-BY-SA-3.0", "copyright-authors", "other", "reddit-api", "world-bank", "CC-BY-4.0", "CC-BY-NC-4.0", "PDDL", "CC-BY-3.0", "CC-BY-3.0-IGO", "US-Government-Works", "CC-BY-NC-SA-3.0-IGO", "CDLA-Permissive-1.0", "CDLA-Sharing-1.0", "CC-BY-ND-4.0", "CC-BY-NC-ND-4.0", "ODC-BY-1.0", "LGPL-3.0", "AGPL-3.0", "FDL-1.3", "EU-ODP-Legal-Notice", "apache-2.0", "GPL-3.0"] # noqa: E501 - if license_name not in allowed_values: - raise ValueError( - "Invalid value for `license_name` ({0}), must be one of {1}" # noqa: E501 - .format(license_name, allowed_values) - ) - - self._license_name = license_name - - @property - def subtitle(self): - """Gets the subtitle of this DatasetNewRequest. # noqa: E501 - - The subtitle to be set on the dataset # noqa: E501 - - :return: The subtitle of this DatasetNewRequest. # noqa: E501 - :rtype: str - """ - return self._subtitle - - @subtitle.setter - def subtitle(self, subtitle): - """Sets the subtitle of this DatasetNewRequest. - - The subtitle to be set on the dataset # noqa: E501 - - :param subtitle: The subtitle of this DatasetNewRequest. # noqa: E501 - :type: str - """ - - self._subtitle = subtitle - - @property - def description(self): - """Gets the description of this DatasetNewRequest. # noqa: E501 - - The description to be set on the dataset # noqa: E501 - - :return: The description of this DatasetNewRequest. # noqa: E501 - :rtype: str - """ - return self._description - - @description.setter - def description(self, description): - """Sets the description of this DatasetNewRequest. - - The description to be set on the dataset # noqa: E501 - - :param description: The description of this DatasetNewRequest. # noqa: E501 - :type: str - """ - - self._description = description - - @property - def files(self): - """Gets the files of this DatasetNewRequest. # noqa: E501 - - A list of files that should be associated with the dataset # noqa: E501 - - :return: The files of this DatasetNewRequest. # noqa: E501 - :rtype: list[UploadFile] - """ - return self._files - - @files.setter - def files(self, files): - """Sets the files of this DatasetNewRequest. - - A list of files that should be associated with the dataset # noqa: E501 - - :param files: The files of this DatasetNewRequest. # noqa: E501 - :type: list[UploadFile] - """ - if files is None: - raise ValueError("Invalid value for `files`, must not be `None`") # noqa: E501 - - self._files = files - - @property - def is_private(self): - """Gets the is_private of this DatasetNewRequest. # noqa: E501 - - Whether or not the dataset should be private # noqa: E501 - - :return: The is_private of this DatasetNewRequest. # noqa: E501 - :rtype: bool - """ - return self._is_private - - @is_private.setter - def is_private(self, is_private): - """Sets the is_private of this DatasetNewRequest. - - Whether or not the dataset should be private # noqa: E501 - - :param is_private: The is_private of this DatasetNewRequest. # noqa: E501 - :type: bool - """ - - self._is_private = is_private - - @property - def convert_to_csv(self): - """Gets the convert_to_csv of this DatasetNewRequest. 
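# The removed request models validate enum-like fields in their setters, as the
# license_name setter above does.  The same check, reduced to a standalone helper;
# the function name and the shortened value list are illustrative:

def validate_choice(field, value, allowed_values):
    """Raise ValueError when value is not one of allowed_values."""
    if value not in allowed_values:
        raise ValueError(
            "Invalid value for `{0}` ({1}), must be one of {2}".format(
                field, value, allowed_values))
    return value

license_name = validate_choice("license_name", "CC0-1.0", ["CC0-1.0", "unknown", "other"])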
# noqa: E501 - - Whether or not a tabular dataset should be converted to csv # noqa: E501 - - :return: The convert_to_csv of this DatasetNewRequest. # noqa: E501 - :rtype: bool - """ - return self._convert_to_csv - - @convert_to_csv.setter - def convert_to_csv(self, convert_to_csv): - """Sets the convert_to_csv of this DatasetNewRequest. - - Whether or not a tabular dataset should be converted to csv # noqa: E501 - - :param convert_to_csv: The convert_to_csv of this DatasetNewRequest. # noqa: E501 - :type: bool - """ - - self._convert_to_csv = convert_to_csv - - @property - def category_ids(self): - """Gets the category_ids of this DatasetNewRequest. # noqa: E501 - - A list of tag IDs to associated with the dataset # noqa: E501 - - :return: The category_ids of this DatasetNewRequest. # noqa: E501 - :rtype: list[str] - """ - return self._category_ids - - @category_ids.setter - def category_ids(self, category_ids): - """Sets the category_ids of this DatasetNewRequest. - - A list of tag IDs to associated with the dataset # noqa: E501 - - :param category_ids: The category_ids of this DatasetNewRequest. # noqa: E501 - :type: list[str] - """ - - self._category_ids = category_ids - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, DatasetNewRequest): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/kaggle/models/dataset_new_version_request.py b/kaggle/models/dataset_new_version_request.py deleted file mode 100644 index 941184f..0000000 --- a/kaggle/models/dataset_new_version_request.py +++ /dev/null @@ -1,302 +0,0 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -""" - Kaggle API - - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - - -import pprint -import re # noqa: F401 - -import six - -from kaggle.models.upload_file import UploadFile # noqa: F401,E501 - - -class DatasetNewVersionRequest(object): - """NOTE: This class is auto generated by the swagger code generator program. 
- - Do not edit the class manually. - """ - - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'version_notes': 'str', - 'subtitle': 'str', - 'description': 'str', - 'files': 'list[UploadFile]', - 'convert_to_csv': 'bool', - 'category_ids': 'list[str]', - 'delete_old_versions': 'bool' - } - - attribute_map = { - 'version_notes': 'versionNotes', - 'subtitle': 'subtitle', - 'description': 'description', - 'files': 'files', - 'convert_to_csv': 'convertToCsv', - 'category_ids': 'categoryIds', - 'delete_old_versions': 'deleteOldVersions' - } - - def __init__(self, version_notes=None, subtitle=None, description=None, files=None, convert_to_csv=True, category_ids=None, delete_old_versions=False): # noqa: E501 - """DatasetNewVersionRequest - a model defined in Swagger""" # noqa: E501 - - self._version_notes = None - self._subtitle = None - self._description = None - self._files = None - self._convert_to_csv = None - self._category_ids = None - self._delete_old_versions = None - self.discriminator = None - - self.version_notes = version_notes - if subtitle is not None: - self.subtitle = subtitle - if description is not None: - self.description = description - self.files = files - if convert_to_csv is not None: - self.convert_to_csv = convert_to_csv - if category_ids is not None: - self.category_ids = category_ids - if delete_old_versions is not None: - self.delete_old_versions = delete_old_versions - - @property - def version_notes(self): - """Gets the version_notes of this DatasetNewVersionRequest. # noqa: E501 - - The version notes for the new dataset version # noqa: E501 - - :return: The version_notes of this DatasetNewVersionRequest. # noqa: E501 - :rtype: str - """ - return self._version_notes - - @version_notes.setter - def version_notes(self, version_notes): - """Sets the version_notes of this DatasetNewVersionRequest. - - The version notes for the new dataset version # noqa: E501 - - :param version_notes: The version_notes of this DatasetNewVersionRequest. # noqa: E501 - :type: str - """ - if version_notes is None: - raise ValueError("Invalid value for `version_notes`, must not be `None`") # noqa: E501 - - self._version_notes = version_notes - - @property - def subtitle(self): - """Gets the subtitle of this DatasetNewVersionRequest. # noqa: E501 - - The subtitle to set on the dataset # noqa: E501 - - :return: The subtitle of this DatasetNewVersionRequest. # noqa: E501 - :rtype: str - """ - return self._subtitle - - @subtitle.setter - def subtitle(self, subtitle): - """Sets the subtitle of this DatasetNewVersionRequest. - - The subtitle to set on the dataset # noqa: E501 - - :param subtitle: The subtitle of this DatasetNewVersionRequest. # noqa: E501 - :type: str - """ - - self._subtitle = subtitle - - @property - def description(self): - """Gets the description of this DatasetNewVersionRequest. # noqa: E501 - - The description to set on the dataset # noqa: E501 - - :return: The description of this DatasetNewVersionRequest. # noqa: E501 - :rtype: str - """ - return self._description - - @description.setter - def description(self, description): - """Sets the description of this DatasetNewVersionRequest. - - The description to set on the dataset # noqa: E501 - - :param description: The description of this DatasetNewVersionRequest. 
# noqa: E501 - :type: str - """ - - self._description = description - - @property - def files(self): - """Gets the files of this DatasetNewVersionRequest. # noqa: E501 - - A list of files that should be associated with the dataset # noqa: E501 - - :return: The files of this DatasetNewVersionRequest. # noqa: E501 - :rtype: list[UploadFile] - """ - return self._files - - @files.setter - def files(self, files): - """Sets the files of this DatasetNewVersionRequest. - - A list of files that should be associated with the dataset # noqa: E501 - - :param files: The files of this DatasetNewVersionRequest. # noqa: E501 - :type: list[UploadFile] - """ - if files is None: - raise ValueError("Invalid value for `files`, must not be `None`") # noqa: E501 - - self._files = files - - @property - def convert_to_csv(self): - """Gets the convert_to_csv of this DatasetNewVersionRequest. # noqa: E501 - - Whether or not a tabular dataset should be converted to csv # noqa: E501 - - :return: The convert_to_csv of this DatasetNewVersionRequest. # noqa: E501 - :rtype: bool - """ - return self._convert_to_csv - - @convert_to_csv.setter - def convert_to_csv(self, convert_to_csv): - """Sets the convert_to_csv of this DatasetNewVersionRequest. - - Whether or not a tabular dataset should be converted to csv # noqa: E501 - - :param convert_to_csv: The convert_to_csv of this DatasetNewVersionRequest. # noqa: E501 - :type: bool - """ - - self._convert_to_csv = convert_to_csv - - @property - def category_ids(self): - """Gets the category_ids of this DatasetNewVersionRequest. # noqa: E501 - - A list of tag IDs to associated with the dataset # noqa: E501 - - :return: The category_ids of this DatasetNewVersionRequest. # noqa: E501 - :rtype: list[str] - """ - return self._category_ids - - @category_ids.setter - def category_ids(self, category_ids): - """Sets the category_ids of this DatasetNewVersionRequest. - - A list of tag IDs to associated with the dataset # noqa: E501 - - :param category_ids: The category_ids of this DatasetNewVersionRequest. # noqa: E501 - :type: list[str] - """ - - self._category_ids = category_ids - - @property - def delete_old_versions(self): - """Gets the delete_old_versions of this DatasetNewVersionRequest. # noqa: E501 - - Whether or not all previous versions of the dataset should be deleted upon creating the new version # noqa: E501 - - :return: The delete_old_versions of this DatasetNewVersionRequest. # noqa: E501 - :rtype: bool - """ - return self._delete_old_versions - - @delete_old_versions.setter - def delete_old_versions(self, delete_old_versions): - """Sets the delete_old_versions of this DatasetNewVersionRequest. - - Whether or not all previous versions of the dataset should be deleted upon creating the new version # noqa: E501 - - :param delete_old_versions: The delete_old_versions of this DatasetNewVersionRequest. 
# noqa: E501 - :type: bool - """ - - self._delete_old_versions = delete_old_versions - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, DatasetNewVersionRequest): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/kaggle/models/dataset_update_settings_request.py b/kaggle/models/dataset_update_settings_request.py deleted file mode 100644 index eef92f1..0000000 --- a/kaggle/models/dataset_update_settings_request.py +++ /dev/null @@ -1,326 +0,0 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -""" - Kaggle API - - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - - -import pprint -import re # noqa: F401 - -import six - - -class DatasetUpdateSettingsRequest(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'title': 'str', - 'subtitle': 'str', - 'description': 'str', - 'is_private': 'bool', - 'licenses': 'list[object]', - 'keywords': 'list[str]', - 'collaborators': 'list[object]', - 'data': 'list[object]' - } - - attribute_map = { - 'title': 'title', - 'subtitle': 'subtitle', - 'description': 'description', - 'is_private': 'isPrivate', - 'licenses': 'licenses', - 'keywords': 'keywords', - 'collaborators': 'collaborators', - 'data': 'data' - } - - def __init__(self, title=None, subtitle=None, description=None, is_private=None, licenses=None, keywords=None, collaborators=None, data=None): # noqa: E501 - """DatasetUpdateSettingsRequest - a model defined in Swagger""" # noqa: E501 - - self._title = None - self._subtitle = None - self._description = None - self._is_private = None - self._licenses = None - self._keywords = None - self._collaborators = None - self._data = None - self.discriminator = None - - if title is not None: - self.title = title - if subtitle is not None: - self.subtitle = subtitle - if description is not None: - self.description = description - if is_private is not None: - self.is_private = is_private - if licenses is not None: - self.licenses = licenses - if keywords is not None: - self.keywords = keywords - if collaborators is not None: - self.collaborators = collaborators - if data is not None: - self.data = data - - @property - def title(self): - """Gets the title of this DatasetUpdateSettingsRequest. # noqa: E501 - - Title of the dataset # noqa: E501 - - :return: The title of this DatasetUpdateSettingsRequest. # noqa: E501 - :rtype: str - """ - return self._title - - @title.setter - def title(self, title): - """Sets the title of this DatasetUpdateSettingsRequest. - - Title of the dataset # noqa: E501 - - :param title: The title of this DatasetUpdateSettingsRequest. # noqa: E501 - :type: str - """ - - self._title = title - - @property - def subtitle(self): - """Gets the subtitle of this DatasetUpdateSettingsRequest. # noqa: E501 - - Subtitle of the dataset # noqa: E501 - - :return: The subtitle of this DatasetUpdateSettingsRequest. # noqa: E501 - :rtype: str - """ - return self._subtitle - - @subtitle.setter - def subtitle(self, subtitle): - """Sets the subtitle of this DatasetUpdateSettingsRequest. - - Subtitle of the dataset # noqa: E501 - - :param subtitle: The subtitle of this DatasetUpdateSettingsRequest. # noqa: E501 - :type: str - """ - - self._subtitle = subtitle - - @property - def description(self): - """Gets the description of this DatasetUpdateSettingsRequest. # noqa: E501 - - Decription of the dataset # noqa: E501 - - :return: The description of this DatasetUpdateSettingsRequest. # noqa: E501 - :rtype: str - """ - return self._description - - @description.setter - def description(self, description): - """Sets the description of this DatasetUpdateSettingsRequest. - - Decription of the dataset # noqa: E501 - - :param description: The description of this DatasetUpdateSettingsRequest. # noqa: E501 - :type: str - """ - - self._description = description - - @property - def is_private(self): - """Gets the is_private of this DatasetUpdateSettingsRequest. # noqa: E501 - - Whether or not the dataset should be private # noqa: E501 - - :return: The is_private of this DatasetUpdateSettingsRequest. # noqa: E501 - :rtype: bool - """ - return self._is_private - - @is_private.setter - def is_private(self, is_private): - """Sets the is_private of this DatasetUpdateSettingsRequest. 
- - Whether or not the dataset should be private # noqa: E501 - - :param is_private: The is_private of this DatasetUpdateSettingsRequest. # noqa: E501 - :type: bool - """ - - self._is_private = is_private - - @property - def licenses(self): - """Gets the licenses of this DatasetUpdateSettingsRequest. # noqa: E501 - - A list of licenses that apply to this dataset # noqa: E501 - - :return: The licenses of this DatasetUpdateSettingsRequest. # noqa: E501 - :rtype: list[object] - """ - return self._licenses - - @licenses.setter - def licenses(self, licenses): - """Sets the licenses of this DatasetUpdateSettingsRequest. - - A list of licenses that apply to this dataset # noqa: E501 - - :param licenses: The licenses of this DatasetUpdateSettingsRequest. # noqa: E501 - :type: list[object] - """ - - self._licenses = licenses - - @property - def keywords(self): - """Gets the keywords of this DatasetUpdateSettingsRequest. # noqa: E501 - - A list of keywords that apply to this dataset # noqa: E501 - - :return: The keywords of this DatasetUpdateSettingsRequest. # noqa: E501 - :rtype: list[str] - """ - return self._keywords - - @keywords.setter - def keywords(self, keywords): - """Sets the keywords of this DatasetUpdateSettingsRequest. - - A list of keywords that apply to this dataset # noqa: E501 - - :param keywords: The keywords of this DatasetUpdateSettingsRequest. # noqa: E501 - :type: list[str] - """ - - self._keywords = keywords - - @property - def collaborators(self): - """Gets the collaborators of this DatasetUpdateSettingsRequest. # noqa: E501 - - A list of collaborators that may read or edit this dataset # noqa: E501 - - :return: The collaborators of this DatasetUpdateSettingsRequest. # noqa: E501 - :rtype: list[object] - """ - return self._collaborators - - @collaborators.setter - def collaborators(self, collaborators): - """Sets the collaborators of this DatasetUpdateSettingsRequest. - - A list of collaborators that may read or edit this dataset # noqa: E501 - - :param collaborators: The collaborators of this DatasetUpdateSettingsRequest. # noqa: E501 - :type: list[object] - """ - - self._collaborators = collaborators - - @property - def data(self): - """Gets the data of this DatasetUpdateSettingsRequest. # noqa: E501 - - A list containing metadata for each file in the dataset # noqa: E501 - - :return: The data of this DatasetUpdateSettingsRequest. # noqa: E501 - :rtype: list[object] - """ - return self._data - - @data.setter - def data(self, data): - """Sets the data of this DatasetUpdateSettingsRequest. - - A list containing metadata for each file in the dataset # noqa: E501 - - :param data: The data of this DatasetUpdateSettingsRequest. 
# noqa: E501 - :type: list[object] - """ - - self._data = data - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, DatasetUpdateSettingsRequest): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/kaggle/models/error.py b/kaggle/models/error.py deleted file mode 100644 index ac7253e..0000000 --- a/kaggle/models/error.py +++ /dev/null @@ -1,158 +0,0 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -""" - Kaggle API - - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - - -import pprint -import re # noqa: F401 - -import six - - -class Error(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'code': 'int', - 'message': 'str' - } - - attribute_map = { - 'code': 'code', - 'message': 'message' - } - - def __init__(self, code=None, message=None): # noqa: E501 - """Error - a model defined in Swagger""" # noqa: E501 - - self._code = None - self._message = None - self.discriminator = None - - if code is not None: - self.code = code - if message is not None: - self.message = message - - @property - def code(self): - """Gets the code of this Error. # noqa: E501 - - The server error code returned # noqa: E501 - - :return: The code of this Error. # noqa: E501 - :rtype: int - """ - return self._code - - @code.setter - def code(self, code): - """Sets the code of this Error. - - The server error code returned # noqa: E501 - - :param code: The code of this Error. # noqa: E501 - :type: int - """ - - self._code = code - - @property - def message(self): - """Gets the message of this Error. # noqa: E501 - - The error message generated by the server # noqa: E501 - - :return: The message of this Error. 
# noqa: E501 - :rtype: str - """ - return self._message - - @message.setter - def message(self, message): - """Sets the message of this Error. - - The error message generated by the server # noqa: E501 - - :param message: The message of this Error. # noqa: E501 - :type: str - """ - - self._message = message - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Error): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/kaggle/models/kernel_push_request.py b/kaggle/models/kernel_push_request.py deleted file mode 100644 index 8d7cac8..0000000 --- a/kaggle/models/kernel_push_request.py +++ /dev/null @@ -1,571 +0,0 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -""" - Kaggle API - - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - - -import pprint -import re # noqa: F401 - -import six - - -class KernelPushRequest(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'id': 'int', - 'slug': 'str', - 'new_title': 'str', - 'text': 'str', - 'language': 'str', - 'kernel_type': 'str', - 'is_private': 'bool', - 'enable_gpu': 'bool', - 'enable_tpu': 'bool', - 'enable_internet': 'bool', - 'dataset_data_sources': 'list[str]', - 'competition_data_sources': 'list[str]', - 'kernel_data_sources': 'list[str]', - 'model_data_sources': 'list[str]', - 'category_ids': 'list[str]', - 'docker_image_pinning_type': 'str' - } - - attribute_map = { - 'id': 'id', - 'slug': 'slug', - 'new_title': 'newTitle', - 'text': 'text', - 'language': 'language', - 'kernel_type': 'kernelType', - 'is_private': 'isPrivate', - 'enable_gpu': 'enableGpu', - 'enable_tpu': 'enableTpu', - 'enable_internet': 'enableInternet', - 'dataset_data_sources': 'datasetDataSources', - 'competition_data_sources': 'competitionDataSources', - 'kernel_data_sources': 'kernelDataSources', - 'model_data_sources': 'modelDataSources', - 'category_ids': 'categoryIds', - 'docker_image_pinning_type': 'dockerImagePinningType' - } - - def __init__(self, id=None, slug=None, new_title=None, text=None, language=None, kernel_type=None, is_private=None, enable_gpu=None, enable_tpu=None, enable_internet=None, dataset_data_sources=None, competition_data_sources=None, kernel_data_sources=None, model_data_sources=None, category_ids=None, docker_image_pinning_type=None): # noqa: E501 - """KernelPushRequest - a model defined in Swagger""" # noqa: E501 - - self._id = None - self._slug = None - self._new_title = None - self._text = None - self._language = None - self._kernel_type = None - self._is_private = None - self._enable_gpu = None - self._enable_tpu = None - self._enable_internet = None - self._dataset_data_sources = None - self._competition_data_sources = None - self._kernel_data_sources = None - self._model_data_sources = None - self._category_ids = None - self._docker_image_pinning_type = None - self.discriminator = None - - if id is not None: - self.id = id - if slug is not None: - self.slug = slug - if new_title is not None: - self.new_title = new_title - self.text = text - self.language = language - self.kernel_type = kernel_type - if is_private is not None: - self.is_private = is_private - if enable_gpu is not None: - self.enable_gpu = enable_gpu - if enable_tpu is not None: - self.enable_tpu = enable_tpu - if enable_internet is not None: - self.enable_internet = enable_internet - if dataset_data_sources is not None: - self.dataset_data_sources = dataset_data_sources - if competition_data_sources is not None: - self.competition_data_sources = competition_data_sources - if kernel_data_sources is not None: - self.kernel_data_sources = kernel_data_sources - if model_data_sources is not None: - self.model_data_sources = model_data_sources - if category_ids is not None: - self.category_ids = category_ids - if docker_image_pinning_type is not None: - self.docker_image_pinning_type = docker_image_pinning_type - - @property - def id(self): - """Gets the id of this KernelPushRequest. # noqa: E501 - - The kernel's ID number. One of `id` and `slug` are required. If both are specified, `id` will be preferred # noqa: E501 - - :return: The id of this KernelPushRequest. # noqa: E501 - :rtype: int - """ - return self._id - - @id.setter - def id(self, id): - """Sets the id of this KernelPushRequest. - - The kernel's ID number. One of `id` and `slug` are required. If both are specified, `id` will be preferred # noqa: E501 - - :param id: The id of this KernelPushRequest. 
# noqa: E501 - :type: int - """ - - self._id = id - - @property - def slug(self): - """Gets the slug of this KernelPushRequest. # noqa: E501 - - The full slug of the kernel to push to, in the format `USERNAME/KERNEL-SLUG`. The kernel slug must be the title lowercased with dashes (`-`) replacing spaces. One of `id` and `slug` are required. If both are specified, `id` will be preferred # noqa: E501 - - :return: The slug of this KernelPushRequest. # noqa: E501 - :rtype: str - """ - return self._slug - - @slug.setter - def slug(self, slug): - """Sets the slug of this KernelPushRequest. - - The full slug of the kernel to push to, in the format `USERNAME/KERNEL-SLUG`. The kernel slug must be the title lowercased with dashes (`-`) replacing spaces. One of `id` and `slug` are required. If both are specified, `id` will be preferred # noqa: E501 - - :param slug: The slug of this KernelPushRequest. # noqa: E501 - :type: str - """ - - self._slug = slug - - @property - def new_title(self): - """Gets the new_title of this KernelPushRequest. # noqa: E501 - - The title to be set on the kernel # noqa: E501 - - :return: The new_title of this KernelPushRequest. # noqa: E501 - :rtype: str - """ - return self._new_title - - @new_title.setter - def new_title(self, new_title): - """Sets the new_title of this KernelPushRequest. - - The title to be set on the kernel # noqa: E501 - - :param new_title: The new_title of this KernelPushRequest. # noqa: E501 - :type: str - """ - - self._new_title = new_title - - @property - def text(self): - """Gets the text of this KernelPushRequest. # noqa: E501 - - The kernel's source code # noqa: E501 - - :return: The text of this KernelPushRequest. # noqa: E501 - :rtype: str - """ - return self._text - - @text.setter - def text(self, text): - """Sets the text of this KernelPushRequest. - - The kernel's source code # noqa: E501 - - :param text: The text of this KernelPushRequest. # noqa: E501 - :type: str - """ - if text is None: - raise ValueError("Invalid value for `text`, must not be `None`") # noqa: E501 - - self._text = text - - @property - def language(self): - """Gets the language of this KernelPushRequest. # noqa: E501 - - The language that the kernel is written in # noqa: E501 - - :return: The language of this KernelPushRequest. # noqa: E501 - :rtype: str - """ - return self._language - - @language.setter - def language(self, language): - """Sets the language of this KernelPushRequest. - - The language that the kernel is written in # noqa: E501 - - :param language: The language of this KernelPushRequest. # noqa: E501 - :type: str - """ - if language is None: - raise ValueError("Invalid value for `language`, must not be `None`") # noqa: E501 - allowed_values = ["python", "r", "rmarkdown"] # noqa: E501 - if language not in allowed_values: - raise ValueError( - "Invalid value for `language` ({0}), must be one of {1}" # noqa: E501 - .format(language, allowed_values) - ) - - self._language = language - - @property - def kernel_type(self): - """Gets the kernel_type of this KernelPushRequest. # noqa: E501 - - The type of kernel. Cannot be changed once the kernel has been created # noqa: E501 - - :return: The kernel_type of this KernelPushRequest. # noqa: E501 - :rtype: str - """ - return self._kernel_type - - @kernel_type.setter - def kernel_type(self, kernel_type): - """Sets the kernel_type of this KernelPushRequest. - - The type of kernel. Cannot be changed once the kernel has been created # noqa: E501 - - :param kernel_type: The kernel_type of this KernelPushRequest. 
# noqa: E501 - :type: str - """ - if kernel_type is None: - raise ValueError("Invalid value for `kernel_type`, must not be `None`") # noqa: E501 - allowed_values = ["script", "notebook"] # noqa: E501 - if kernel_type not in allowed_values: - raise ValueError( - "Invalid value for `kernel_type` ({0}), must be one of {1}" # noqa: E501 - .format(kernel_type, allowed_values) - ) - - self._kernel_type = kernel_type - - @property - def is_private(self): - """Gets the is_private of this KernelPushRequest. # noqa: E501 - - Whether or not the kernel should be private # noqa: E501 - - :return: The is_private of this KernelPushRequest. # noqa: E501 - :rtype: bool - """ - return self._is_private - - @is_private.setter - def is_private(self, is_private): - """Sets the is_private of this KernelPushRequest. - - Whether or not the kernel should be private # noqa: E501 - - :param is_private: The is_private of this KernelPushRequest. # noqa: E501 - :type: bool - """ - - self._is_private = is_private - - @property - def enable_gpu(self): - """Gets the enable_gpu of this KernelPushRequest. # noqa: E501 - - Whether or not the kernel should run on a GPU # noqa: E501 - - :return: The enable_gpu of this KernelPushRequest. # noqa: E501 - :rtype: bool - """ - return self._enable_gpu - - @enable_gpu.setter - def enable_gpu(self, enable_gpu): - """Sets the enable_gpu of this KernelPushRequest. - - Whether or not the kernel should run on a GPU # noqa: E501 - - :param enable_gpu: The enable_gpu of this KernelPushRequest. # noqa: E501 - :type: bool - """ - - self._enable_gpu = enable_gpu - - @property - def enable_tpu(self): - """Gets the enable_tpu of this KernelPushRequest. # noqa: E501 - - Whether or not the kernel should run on a TPU # noqa: E501 - - :return: The enable_tpu of this KernelPushRequest. # noqa: E501 - :rtype: bool - """ - return self._enable_tpu - - @enable_tpu.setter - def enable_tpu(self, enable_tpu): - """Sets the enable_tpu of this KernelPushRequest. - - Whether or not the kernel should run on a TPU # noqa: E501 - - :param enable_tpu: The enable_tpu of this KernelPushRequest. # noqa: E501 - :type: bool - """ - - self._enable_tpu = enable_tpu - - @property - def enable_internet(self): - """Gets the enable_internet of this KernelPushRequest. # noqa: E501 - - Whether or not the kernel should be able to access the internet # noqa: E501 - - :return: The enable_internet of this KernelPushRequest. # noqa: E501 - :rtype: bool - """ - return self._enable_internet - - @enable_internet.setter - def enable_internet(self, enable_internet): - """Sets the enable_internet of this KernelPushRequest. - - Whether or not the kernel should be able to access the internet # noqa: E501 - - :param enable_internet: The enable_internet of this KernelPushRequest. # noqa: E501 - :type: bool - """ - - self._enable_internet = enable_internet - - @property - def dataset_data_sources(self): - """Gets the dataset_data_sources of this KernelPushRequest. # noqa: E501 - - A list of dataset data sources that the kernel should use. Each dataset is specified as `USERNAME/DATASET-SLUG` # noqa: E501 - - :return: The dataset_data_sources of this KernelPushRequest. # noqa: E501 - :rtype: list[str] - """ - return self._dataset_data_sources - - @dataset_data_sources.setter - def dataset_data_sources(self, dataset_data_sources): - """Sets the dataset_data_sources of this KernelPushRequest. - - A list of dataset data sources that the kernel should use. 
Each dataset is specified as `USERNAME/DATASET-SLUG` # noqa: E501 - - :param dataset_data_sources: The dataset_data_sources of this KernelPushRequest. # noqa: E501 - :type: list[str] - """ - - self._dataset_data_sources = dataset_data_sources - - @property - def competition_data_sources(self): - """Gets the competition_data_sources of this KernelPushRequest. # noqa: E501 - - A list of competition data sources that the kernel should use # noqa: E501 - - :return: The competition_data_sources of this KernelPushRequest. # noqa: E501 - :rtype: list[str] - """ - return self._competition_data_sources - - @competition_data_sources.setter - def competition_data_sources(self, competition_data_sources): - """Sets the competition_data_sources of this KernelPushRequest. - - A list of competition data sources that the kernel should use # noqa: E501 - - :param competition_data_sources: The competition_data_sources of this KernelPushRequest. # noqa: E501 - :type: list[str] - """ - - self._competition_data_sources = competition_data_sources - - @property - def kernel_data_sources(self): - """Gets the kernel_data_sources of this KernelPushRequest. # noqa: E501 - - A list of kernel data sources that the kernel should use. Each dataset is specified as `USERNAME/KERNEL-SLUG` # noqa: E501 - - :return: The kernel_data_sources of this KernelPushRequest. # noqa: E501 - :rtype: list[str] - """ - return self._kernel_data_sources - - @kernel_data_sources.setter - def kernel_data_sources(self, kernel_data_sources): - """Sets the kernel_data_sources of this KernelPushRequest. - - A list of kernel data sources that the kernel should use. Each dataset is specified as `USERNAME/KERNEL-SLUG` # noqa: E501 - - :param kernel_data_sources: The kernel_data_sources of this KernelPushRequest. # noqa: E501 - :type: list[str] - """ - - self._kernel_data_sources = kernel_data_sources - - @property - def model_data_sources(self): - """Gets the model_data_sources of this KernelPushRequest. # noqa: E501 - - A list of model data sources that the kernel should use. Each model is specified as `USERNAME/MODEL-SLUG/FRAMEWORK/VARIATION-SLUG/VERSION-NUMBER` # noqa: E501 - - :return: The model_data_sources of this KernelPushRequest. # noqa: E501 - :rtype: list[str] - """ - return self._model_data_sources - - @model_data_sources.setter - def model_data_sources(self, model_data_sources): - """Sets the model_data_sources of this KernelPushRequest. - - A list of model data sources that the kernel should use. Each model is specified as `USERNAME/MODEL-SLUG/FRAMEWORK/VARIATION-SLUG/VERSION-NUMBER` # noqa: E501 - - :param model_data_sources: The model_data_sources of this KernelPushRequest. # noqa: E501 - :type: list[str] - """ - - self._model_data_sources = model_data_sources - - @property - def category_ids(self): - """Gets the category_ids of this KernelPushRequest. # noqa: E501 - - A list of tag IDs to associated with the kernel # noqa: E501 - - :return: The category_ids of this KernelPushRequest. # noqa: E501 - :rtype: list[str] - """ - return self._category_ids - - @category_ids.setter - def category_ids(self, category_ids): - """Sets the category_ids of this KernelPushRequest. - - A list of tag IDs to associated with the kernel # noqa: E501 - - :param category_ids: The category_ids of this KernelPushRequest. # noqa: E501 - :type: list[str] - """ - - self._category_ids = category_ids - - @property - def docker_image_pinning_type(self): - """Gets the docker_image_pinning_type of this KernelPushRequest. 
# noqa: E501 - - Which docker image to use for executing new versions going forward. # noqa: E501 - - :return: The docker_image_pinning_type of this KernelPushRequest. # noqa: E501 - :rtype: str - """ - return self._docker_image_pinning_type - - @docker_image_pinning_type.setter - def docker_image_pinning_type(self, docker_image_pinning_type): - """Sets the docker_image_pinning_type of this KernelPushRequest. - - Which docker image to use for executing new versions going forward. # noqa: E501 - - :param docker_image_pinning_type: The docker_image_pinning_type of this KernelPushRequest. # noqa: E501 - :type: str - """ - allowed_values = ["original", "latest"] # noqa: E501 - if docker_image_pinning_type not in allowed_values: - raise ValueError( - "Invalid value for `docker_image_pinning_type` ({0}), must be one of {1}" # noqa: E501 - .format(docker_image_pinning_type, allowed_values) - ) - - self._docker_image_pinning_type = docker_image_pinning_type - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, KernelPushRequest): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/kaggle/models/license.py b/kaggle/models/license.py deleted file mode 100644 index 0135e8e..0000000 --- a/kaggle/models/license.py +++ /dev/null @@ -1,137 +0,0 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -""" - Kaggle API - - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - - -import pprint -import re # noqa: F401 - -import six - - -class License(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'name': 'str' - } - - attribute_map = { - 'name': 'name' - } - - def __init__(self, name=None): # noqa: E501 - """License - a model defined in Swagger""" # noqa: E501 - - self._name = None - self.discriminator = None - - self.name = name - - @property - def name(self): - """Gets the name of this License. # noqa: E501 - - Name of the license # noqa: E501 - - :return: The name of this License. # noqa: E501 - :rtype: str - """ - return self._name - - @name.setter - def name(self, name): - """Sets the name of this License. - - Name of the license # noqa: E501 - - :param name: The name of this License. # noqa: E501 - :type: str - """ - if name is None: - raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 - allowed_values = ["CC0-1.0", "CC-BY-SA-4.0", "GPL-2.0", "ODbL-1.0", "CC-BY-NC-SA-4.0", "unknown", "DbCL-1.0", "CC-BY-SA-3.0", "copyright-authors", "other", "reddit-api", "world-bank", "CC-BY-4.0", "CC-BY-NC-4.0", "PDDL", "CC-BY-3.0", "CC-BY-3.0-IGO", "US-Government-Works", "CC-BY-NC-SA-3.0-IGO", "CDLA-Permissive-1.0", "CDLA-Sharing-1.0", "CC-BY-ND-4.0", "CC-BY-NC-ND-4.0", "ODC-BY-1.0", "LGPL-3.0", "AGPL-3.0", "FDL-1.3", "EU-ODP-Legal-Notice", "apache-2.0", "GPL-3.0"] # noqa: E501 - if name not in allowed_values: - raise ValueError( - "Invalid value for `name` ({0}), must be one of {1}" # noqa: E501 - .format(name, allowed_values) - ) - - self._name = name - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, License): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/kaggle/models/model_instance_new_version_request.py b/kaggle/models/model_instance_new_version_request.py deleted file mode 100644 index d6bdb9e..0000000 --- a/kaggle/models/model_instance_new_version_request.py +++ /dev/null @@ -1,161 +0,0 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 - -""" - Kaggle API - - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - - -import pprint -import re # noqa: F401 - -import six - -from kaggle.models.upload_file import UploadFile # noqa: F401,E501 - - -class ModelInstanceNewVersionRequest(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - 'version_notes': 'str', - 'files': 'list[UploadFile]' - } - - attribute_map = { - 'version_notes': 'versionNotes', - 'files': 'files' - } - - def __init__(self, version_notes=None, files=None): # noqa: E501 - """ModelInstanceNewVersionRequest - a model defined in Swagger""" # noqa: E501 - - self._version_notes = None - self._files = None - self.discriminator = None - - if version_notes is not None: - self.version_notes = version_notes - self.files = files - - @property - def version_notes(self): - """Gets the version_notes of this ModelInstanceNewVersionRequest. # noqa: E501 - - The version notes for the model instance version # noqa: E501 - - :return: The version_notes of this ModelInstanceNewVersionRequest. # noqa: E501 - :rtype: str - """ - return self._version_notes - - @version_notes.setter - def version_notes(self, version_notes): - """Sets the version_notes of this ModelInstanceNewVersionRequest. - - The version notes for the model instance version # noqa: E501 - - :param version_notes: The version_notes of this ModelInstanceNewVersionRequest. # noqa: E501 - :type: str - """ - - self._version_notes = version_notes - - @property - def files(self): - """Gets the files of this ModelInstanceNewVersionRequest. # noqa: E501 - - A list of files that should be associated with the model instance version # noqa: E501 - - :return: The files of this ModelInstanceNewVersionRequest. # noqa: E501 - :rtype: list[UploadFile] - """ - return self._files - - @files.setter - def files(self, files): - """Sets the files of this ModelInstanceNewVersionRequest. - - A list of files that should be associated with the model instance version # noqa: E501 - - :param files: The files of this ModelInstanceNewVersionRequest. 
# noqa: E501 - :type: list[UploadFile] - """ - if files is None: - raise ValueError("Invalid value for `files`, must not be `None`") # noqa: E501 - - self._files = files - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ModelInstanceNewVersionRequest): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/kaggle/models/model_instance_update_request.py b/kaggle/models/model_instance_update_request.py deleted file mode 100644 index 233b05f..0000000 --- a/kaggle/models/model_instance_update_request.py +++ /dev/null @@ -1,367 +0,0 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -""" - Kaggle API - - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - - -import pprint -import re # noqa: F401 - -import six - - -class ModelInstanceUpdateRequest(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'overview': 'str', - 'usage': 'str', - 'license_name': 'str', - 'fine_tunable': 'bool', - 'training_data': 'list[str]', - 'model_instance_type': 'str', - 'base_model_instance': 'str', - 'external_base_model_url': 'int', - 'update_mask': 'str' - } - - attribute_map = { - 'overview': 'overview', - 'usage': 'usage', - 'license_name': 'licenseName', - 'fine_tunable': 'fineTunable', - 'training_data': 'trainingData', - 'model_instance_type': 'modelInstanceType', - 'base_model_instance': 'baseModelInstance', - 'external_base_model_url': 'externalBaseModelUrl', - 'update_mask': 'updateMask' - } - - def __init__(self, overview=None, usage=None, license_name='Apache 2.0', fine_tunable=True, training_data=None, model_instance_type=None, base_model_instance=None, external_base_model_url=None, update_mask=None): # noqa: E501 - """ModelInstanceUpdateRequest - a model defined in Swagger""" # noqa: E501 - - self._overview = None - self._usage = None - self._license_name = None - self._fine_tunable = None - self._training_data = None - self._model_instance_type = None - self._base_model_instance = None - self._external_base_model_url = None - self._update_mask = None - self.discriminator = None - - if overview is not None: - self.overview = overview - if usage is not None: - self.usage = usage - if license_name is not None: - self.license_name = license_name - if fine_tunable is not None: - self.fine_tunable = fine_tunable - if training_data is not None: - self.training_data = training_data - if model_instance_type is not None: - self.model_instance_type = model_instance_type - if base_model_instance is not None: - self.base_model_instance = base_model_instance - if external_base_model_url is not None: - self.external_base_model_url = external_base_model_url - self.update_mask = update_mask - - @property - def overview(self): - """Gets the overview of this ModelInstanceUpdateRequest. # noqa: E501 - - The overview of the model instance (markdown) # noqa: E501 - - :return: The overview of this ModelInstanceUpdateRequest. # noqa: E501 - :rtype: str - """ - return self._overview - - @overview.setter - def overview(self, overview): - """Sets the overview of this ModelInstanceUpdateRequest. - - The overview of the model instance (markdown) # noqa: E501 - - :param overview: The overview of this ModelInstanceUpdateRequest. # noqa: E501 - :type: str - """ - - self._overview = overview - - @property - def usage(self): - """Gets the usage of this ModelInstanceUpdateRequest. # noqa: E501 - - The description of how to use the model instance (markdown) # noqa: E501 - - :return: The usage of this ModelInstanceUpdateRequest. # noqa: E501 - :rtype: str - """ - return self._usage - - @usage.setter - def usage(self, usage): - """Sets the usage of this ModelInstanceUpdateRequest. - - The description of how to use the model instance (markdown) # noqa: E501 - - :param usage: The usage of this ModelInstanceUpdateRequest. # noqa: E501 - :type: str - """ - - self._usage = usage - - @property - def license_name(self): - """Gets the license_name of this ModelInstanceUpdateRequest. # noqa: E501 - - The license that should be associated with the model instance # noqa: E501 - - :return: The license_name of this ModelInstanceUpdateRequest. # noqa: E501 - :rtype: str - """ - return self._license_name - - @license_name.setter - def license_name(self, license_name): - """Sets the license_name of this ModelInstanceUpdateRequest. 
- - The license that should be associated with the model instance # noqa: E501 - - :param license_name: The license_name of this ModelInstanceUpdateRequest. # noqa: E501 - :type: str - """ - allowed_values = ["CC0 1.0", "CC BY-NC-SA 4.0", "Unknown", "CC BY-SA 4.0", "GPL 2", "CC BY-SA 3.0", "Other", "Other (specified in description)", "CC BY 4.0", "Attribution 4.0 International (CC BY 4.0)", "CC BY-NC 4.0", "Attribution-NonCommercial 4.0 International (CC BY-NC 4.0)", "PDDL", "ODC Public Domain Dedication and Licence (PDDL)", "CC BY 3.0", "Attribution 3.0 Unported (CC BY 3.0)", "CC BY 3.0 IGO", "Attribution 3.0 IGO (CC BY 3.0 IGO)", "CC BY-NC-SA 3.0 IGO", "Attribution-NonCommercial-ShareAlike 3.0 IGO (CC BY-NC-SA 3.0 IGO)", "CDLA Permissive 1.0", "Community Data License Agreement - Permissive - Version 1.0", "CDLA Sharing 1.0", "Community Data License Agreement - Sharing - Version 1.0", "CC BY-ND 4.0", "Attribution-NoDerivatives 4.0 International (CC BY-ND 4.0)", "CC BY-NC-ND 4.0", "Attribution-NonCommercial-NoDerivatives 4.0 International (CC BY-NC-ND 4.0)", "ODC-BY 1.0", "ODC Attribution License (ODC-By)", "LGPL 3.0", "GNU Lesser General Public License 3.0", "AGPL 3.0", "GNU Affero General Public License 3.0", "FDL 1.3", "GNU Free Documentation License 1.3", "apache-2.0", "Apache 2.0", "mit", "MIT", "bsd-3-clause", "BSD-3-Clause", "Llama 2", "Llama 2 Community License", "Gemma", "gpl-3", "GPL 3", "RAIL-M", "AI Pubs Open RAIL-M License", "AIPubs Research-Use RAIL-M", "AI Pubs Research-Use RAIL-M License", "BigScience OpenRAIL-M", "BigScience Open RAIL-M License", "RAIL", "RAIL (specified in description)", "Llama 3", "Llama 3 Community License"] # noqa: E501 - if license_name not in allowed_values: - raise ValueError( - "Invalid value for `license_name` ({0}), must be one of {1}" # noqa: E501 - .format(license_name, allowed_values) - ) - - self._license_name = license_name - - @property - def fine_tunable(self): - """Gets the fine_tunable of this ModelInstanceUpdateRequest. # noqa: E501 - - Whether the model instance is fine tunable # noqa: E501 - - :return: The fine_tunable of this ModelInstanceUpdateRequest. # noqa: E501 - :rtype: bool - """ - return self._fine_tunable - - @fine_tunable.setter - def fine_tunable(self, fine_tunable): - """Sets the fine_tunable of this ModelInstanceUpdateRequest. - - Whether the model instance is fine tunable # noqa: E501 - - :param fine_tunable: The fine_tunable of this ModelInstanceUpdateRequest. # noqa: E501 - :type: bool - """ - - self._fine_tunable = fine_tunable - - @property - def training_data(self): - """Gets the training_data of this ModelInstanceUpdateRequest. # noqa: E501 - - A list of training data (urls or names) # noqa: E501 - - :return: The training_data of this ModelInstanceUpdateRequest. # noqa: E501 - :rtype: list[str] - """ - return self._training_data - - @training_data.setter - def training_data(self, training_data): - """Sets the training_data of this ModelInstanceUpdateRequest. - - A list of training data (urls or names) # noqa: E501 - - :param training_data: The training_data of this ModelInstanceUpdateRequest. # noqa: E501 - :type: list[str] - """ - - self._training_data = training_data - - @property - def model_instance_type(self): - """Gets the model_instance_type of this ModelInstanceUpdateRequest. # noqa: E501 - - Whether the model instance is a base model, external variant, internal variant, or unspecified # noqa: E501 - - :return: The model_instance_type of this ModelInstanceUpdateRequest. 
# noqa: E501 - :rtype: str - """ - return self._model_instance_type - - @model_instance_type.setter - def model_instance_type(self, model_instance_type): - """Sets the model_instance_type of this ModelInstanceUpdateRequest. - - Whether the model instance is a base model, external variant, internal variant, or unspecified # noqa: E501 - - :param model_instance_type: The model_instance_type of this ModelInstanceUpdateRequest. # noqa: E501 - :type: str - """ - allowed_values = ["Unspecified", "BaseModel", "KaggleVariant", "ExternalVariant"] # noqa: E501 - if model_instance_type not in allowed_values: - raise ValueError( - "Invalid value for `model_instance_type` ({0}), must be one of {1}" # noqa: E501 - .format(model_instance_type, allowed_values) - ) - - self._model_instance_type = model_instance_type - - @property - def base_model_instance(self): - """Gets the base_model_instance of this ModelInstanceUpdateRequest. # noqa: E501 - - If this is an internal variant, the `{owner-slug}/{model-slug}/{framework}/{instance-slug}` of the base model instance # noqa: E501 - - :return: The base_model_instance of this ModelInstanceUpdateRequest. # noqa: E501 - :rtype: str - """ - return self._base_model_instance - - @base_model_instance.setter - def base_model_instance(self, base_model_instance): - """Sets the base_model_instance of this ModelInstanceUpdateRequest. - - If this is an internal variant, the `{owner-slug}/{model-slug}/{framework}/{instance-slug}` of the base model instance # noqa: E501 - - :param base_model_instance: The base_model_instance of this ModelInstanceUpdateRequest. # noqa: E501 - :type: str - """ - - self._base_model_instance = base_model_instance - - @property - def external_base_model_url(self): - """Gets the external_base_model_url of this ModelInstanceUpdateRequest. # noqa: E501 - - If this is an external variant, a URL to the base model # noqa: E501 - - :return: The external_base_model_url of this ModelInstanceUpdateRequest. # noqa: E501 - :rtype: int - """ - return self._external_base_model_url - - @external_base_model_url.setter - def external_base_model_url(self, external_base_model_url): - """Sets the external_base_model_url of this ModelInstanceUpdateRequest. - - If this is an external variant, a URL to the base model # noqa: E501 - - :param external_base_model_url: The external_base_model_url of this ModelInstanceUpdateRequest. # noqa: E501 - :type: int - """ - - self._external_base_model_url = external_base_model_url - - @property - def update_mask(self): - """Gets the update_mask of this ModelInstanceUpdateRequest. # noqa: E501 - - Describes which fields to update # noqa: E501 - - :return: The update_mask of this ModelInstanceUpdateRequest. # noqa: E501 - :rtype: str - """ - return self._update_mask - - @update_mask.setter - def update_mask(self, update_mask): - """Sets the update_mask of this ModelInstanceUpdateRequest. - - Describes which fields to update # noqa: E501 - - :param update_mask: The update_mask of this ModelInstanceUpdateRequest. 
# noqa: E501 - :type: str - """ - if update_mask is None: - raise ValueError("Invalid value for `update_mask`, must not be `None`") # noqa: E501 - - self._update_mask = update_mask - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ModelInstanceUpdateRequest): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/kaggle/models/model_new_instance_request.py b/kaggle/models/model_new_instance_request.py deleted file mode 100644 index 3d5ac2f..0000000 --- a/kaggle/models/model_new_instance_request.py +++ /dev/null @@ -1,433 +0,0 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -""" - Kaggle API - - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - - -import pprint -import re # noqa: F401 - -import six - -from kaggle.models.upload_file import UploadFile # noqa: F401,E501 - - -class ModelNewInstanceRequest(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'instance_slug': 'str', - 'framework': 'str', - 'overview': 'str', - 'usage': 'str', - 'license_name': 'str', - 'fine_tunable': 'bool', - 'training_data': 'list[str]', - 'model_instance_type': 'str', - 'base_model_instance': 'str', - 'external_base_model_url': 'int', - 'files': 'list[UploadFile]' - } - - attribute_map = { - 'instance_slug': 'instanceSlug', - 'framework': 'framework', - 'overview': 'overview', - 'usage': 'usage', - 'license_name': 'licenseName', - 'fine_tunable': 'fineTunable', - 'training_data': 'trainingData', - 'model_instance_type': 'modelInstanceType', - 'base_model_instance': 'baseModelInstance', - 'external_base_model_url': 'externalBaseModelUrl', - 'files': 'files' - } - - def __init__(self, instance_slug=None, framework=None, overview=None, usage=None, license_name='Apache 2.0', fine_tunable=True, training_data=None, model_instance_type=None, base_model_instance=None, external_base_model_url=None, files=None): # noqa: E501 - """ModelNewInstanceRequest - a model defined in Swagger""" # noqa: E501 - - self._instance_slug = None - self._framework = None - self._overview = None - self._usage = None - self._license_name = None - self._fine_tunable = None - self._training_data = None - self._model_instance_type = None - self._base_model_instance = None - self._external_base_model_url = None - self._files = None - self.discriminator = None - - self.instance_slug = instance_slug - self.framework = framework - if overview is not None: - self.overview = overview - if usage is not None: - self.usage = usage - self.license_name = license_name - if fine_tunable is not None: - self.fine_tunable = fine_tunable - if training_data is not None: - self.training_data = training_data - if model_instance_type is not None: - self.model_instance_type = model_instance_type - if base_model_instance is not None: - self.base_model_instance = base_model_instance - if external_base_model_url is not None: - self.external_base_model_url = external_base_model_url - if files is not None: - self.files = files - - @property - def instance_slug(self): - """Gets the instance_slug of this ModelNewInstanceRequest. # noqa: E501 - - The slug that the model instance should be created with # noqa: E501 - - :return: The instance_slug of this ModelNewInstanceRequest. # noqa: E501 - :rtype: str - """ - return self._instance_slug - - @instance_slug.setter - def instance_slug(self, instance_slug): - """Sets the instance_slug of this ModelNewInstanceRequest. - - The slug that the model instance should be created with # noqa: E501 - - :param instance_slug: The instance_slug of this ModelNewInstanceRequest. # noqa: E501 - :type: str - """ - if instance_slug is None: - raise ValueError("Invalid value for `instance_slug`, must not be `None`") # noqa: E501 - - self._instance_slug = instance_slug - - @property - def framework(self): - """Gets the framework of this ModelNewInstanceRequest. # noqa: E501 - - The framework of the model instance # noqa: E501 - - :return: The framework of this ModelNewInstanceRequest. # noqa: E501 - :rtype: str - """ - return self._framework - - @framework.setter - def framework(self, framework): - """Sets the framework of this ModelNewInstanceRequest. - - The framework of the model instance # noqa: E501 - - :param framework: The framework of this ModelNewInstanceRequest. 
# noqa: E501 - :type: str - """ - if framework is None: - raise ValueError("Invalid value for `framework`, must not be `None`") # noqa: E501 - allowed_values = ["tensorFlow1", "tensorFlow2", "tfLite", "tfJs", "pyTorch", "jax", "flax", "pax", "maxText", "gemmaCpp", "tensorRtLlm", "ggml", "gguf", "coral", "scikitLearn", "mxnet", "onnx", "keras", "transformers", "triton", "other"] # noqa: E501 - if framework not in allowed_values: - raise ValueError( - "Invalid value for `framework` ({0}), must be one of {1}" # noqa: E501 - .format(framework, allowed_values) - ) - - self._framework = framework - - @property - def overview(self): - """Gets the overview of this ModelNewInstanceRequest. # noqa: E501 - - The overview of the model instance (markdown) # noqa: E501 - - :return: The overview of this ModelNewInstanceRequest. # noqa: E501 - :rtype: str - """ - return self._overview - - @overview.setter - def overview(self, overview): - """Sets the overview of this ModelNewInstanceRequest. - - The overview of the model instance (markdown) # noqa: E501 - - :param overview: The overview of this ModelNewInstanceRequest. # noqa: E501 - :type: str - """ - - self._overview = overview - - @property - def usage(self): - """Gets the usage of this ModelNewInstanceRequest. # noqa: E501 - - The description of how to use the model instance (markdown) # noqa: E501 - - :return: The usage of this ModelNewInstanceRequest. # noqa: E501 - :rtype: str - """ - return self._usage - - @usage.setter - def usage(self, usage): - """Sets the usage of this ModelNewInstanceRequest. - - The description of how to use the model instance (markdown) # noqa: E501 - - :param usage: The usage of this ModelNewInstanceRequest. # noqa: E501 - :type: str - """ - - self._usage = usage - - @property - def license_name(self): - """Gets the license_name of this ModelNewInstanceRequest. # noqa: E501 - - The license that should be associated with the model instance # noqa: E501 - - :return: The license_name of this ModelNewInstanceRequest. # noqa: E501 - :rtype: str - """ - return self._license_name - - @license_name.setter - def license_name(self, license_name): - """Sets the license_name of this ModelNewInstanceRequest. - - The license that should be associated with the model instance # noqa: E501 - - :param license_name: The license_name of this ModelNewInstanceRequest. 
# noqa: E501 - :type: str - """ - if license_name is None: - raise ValueError("Invalid value for `license_name`, must not be `None`") # noqa: E501 - allowed_values = ["CC0 1.0", "CC BY-NC-SA 4.0", "Unknown", "CC BY-SA 4.0", "GPL 2", "CC BY-SA 3.0", "Other", "Other (specified in description)", "CC BY 4.0", "Attribution 4.0 International (CC BY 4.0)", "CC BY-NC 4.0", "Attribution-NonCommercial 4.0 International (CC BY-NC 4.0)", "PDDL", "ODC Public Domain Dedication and Licence (PDDL)", "CC BY 3.0", "Attribution 3.0 Unported (CC BY 3.0)", "CC BY 3.0 IGO", "Attribution 3.0 IGO (CC BY 3.0 IGO)", "CC BY-NC-SA 3.0 IGO", "Attribution-NonCommercial-ShareAlike 3.0 IGO (CC BY-NC-SA 3.0 IGO)", "CDLA Permissive 1.0", "Community Data License Agreement - Permissive - Version 1.0", "CDLA Sharing 1.0", "Community Data License Agreement - Sharing - Version 1.0", "CC BY-ND 4.0", "Attribution-NoDerivatives 4.0 International (CC BY-ND 4.0)", "CC BY-NC-ND 4.0", "Attribution-NonCommercial-NoDerivatives 4.0 International (CC BY-NC-ND 4.0)", "ODC-BY 1.0", "ODC Attribution License (ODC-By)", "LGPL 3.0", "GNU Lesser General Public License 3.0", "AGPL 3.0", "GNU Affero General Public License 3.0", "FDL 1.3", "GNU Free Documentation License 1.3", "apache-2.0", "Apache 2.0", "mit", "MIT", "bsd-3-clause", "BSD-3-Clause", "Llama 2", "Llama 2 Community License", "Gemma", "gpl-3", "GPL 3", "RAIL-M", "AI Pubs Open RAIL-M License", "AIPubs Research-Use RAIL-M", "AI Pubs Research-Use RAIL-M License", "BigScience OpenRAIL-M", "BigScience Open RAIL-M License", "RAIL", "RAIL (specified in description)", "Llama 3", "Llama 3 Community License"] # noqa: E501 - if license_name not in allowed_values: - raise ValueError( - "Invalid value for `license_name` ({0}), must be one of {1}" # noqa: E501 - .format(license_name, allowed_values) - ) - - self._license_name = license_name - - @property - def fine_tunable(self): - """Gets the fine_tunable of this ModelNewInstanceRequest. # noqa: E501 - - Whether the model instance is fine tunable # noqa: E501 - - :return: The fine_tunable of this ModelNewInstanceRequest. # noqa: E501 - :rtype: bool - """ - return self._fine_tunable - - @fine_tunable.setter - def fine_tunable(self, fine_tunable): - """Sets the fine_tunable of this ModelNewInstanceRequest. - - Whether the model instance is fine tunable # noqa: E501 - - :param fine_tunable: The fine_tunable of this ModelNewInstanceRequest. # noqa: E501 - :type: bool - """ - - self._fine_tunable = fine_tunable - - @property - def training_data(self): - """Gets the training_data of this ModelNewInstanceRequest. # noqa: E501 - - A list of training data (urls or names) # noqa: E501 - - :return: The training_data of this ModelNewInstanceRequest. # noqa: E501 - :rtype: list[str] - """ - return self._training_data - - @training_data.setter - def training_data(self, training_data): - """Sets the training_data of this ModelNewInstanceRequest. - - A list of training data (urls or names) # noqa: E501 - - :param training_data: The training_data of this ModelNewInstanceRequest. # noqa: E501 - :type: list[str] - """ - - self._training_data = training_data - - @property - def model_instance_type(self): - """Gets the model_instance_type of this ModelNewInstanceRequest. # noqa: E501 - - Whether the model instance is a base model, external variant, internal variant, or unspecified # noqa: E501 - - :return: The model_instance_type of this ModelNewInstanceRequest. 
# noqa: E501 - :rtype: str - """ - return self._model_instance_type - - @model_instance_type.setter - def model_instance_type(self, model_instance_type): - """Sets the model_instance_type of this ModelNewInstanceRequest. - - Whether the model instance is a base model, external variant, internal variant, or unspecified # noqa: E501 - - :param model_instance_type: The model_instance_type of this ModelNewInstanceRequest. # noqa: E501 - :type: str - """ - allowed_values = ["Unspecified", "BaseModel", "KaggleVariant", "ExternalVariant"] # noqa: E501 - if model_instance_type not in allowed_values: - raise ValueError( - "Invalid value for `model_instance_type` ({0}), must be one of {1}" # noqa: E501 - .format(model_instance_type, allowed_values) - ) - - self._model_instance_type = model_instance_type - - @property - def base_model_instance(self): - """Gets the base_model_instance of this ModelNewInstanceRequest. # noqa: E501 - - If this is an internal variant, the `{owner-slug}/{model-slug}/{framework}/{instance-slug}` of the base model instance # noqa: E501 - - :return: The base_model_instance of this ModelNewInstanceRequest. # noqa: E501 - :rtype: str - """ - return self._base_model_instance - - @base_model_instance.setter - def base_model_instance(self, base_model_instance): - """Sets the base_model_instance of this ModelNewInstanceRequest. - - If this is an internal variant, the `{owner-slug}/{model-slug}/{framework}/{instance-slug}` of the base model instance # noqa: E501 - - :param base_model_instance: The base_model_instance of this ModelNewInstanceRequest. # noqa: E501 - :type: str - """ - - self._base_model_instance = base_model_instance - - @property - def external_base_model_url(self): - """Gets the external_base_model_url of this ModelNewInstanceRequest. # noqa: E501 - - If this is an external variant, a URL to the base model # noqa: E501 - - :return: The external_base_model_url of this ModelNewInstanceRequest. # noqa: E501 - :rtype: int - """ - return self._external_base_model_url - - @external_base_model_url.setter - def external_base_model_url(self, external_base_model_url): - """Sets the external_base_model_url of this ModelNewInstanceRequest. - - If this is an external variant, a URL to the base model # noqa: E501 - - :param external_base_model_url: The external_base_model_url of this ModelNewInstanceRequest. # noqa: E501 - :type: int - """ - - self._external_base_model_url = external_base_model_url - - @property - def files(self): - """Gets the files of this ModelNewInstanceRequest. # noqa: E501 - - A list of files that should be associated with the model instance version # noqa: E501 - - :return: The files of this ModelNewInstanceRequest. # noqa: E501 - :rtype: list[UploadFile] - """ - return self._files - - @files.setter - def files(self, files): - """Sets the files of this ModelNewInstanceRequest. - - A list of files that should be associated with the model instance version # noqa: E501 - - :param files: The files of this ModelNewInstanceRequest. 
# noqa: E501 - :type: list[UploadFile] - """ - - self._files = files - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ModelNewInstanceRequest): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/kaggle/models/model_new_request.py b/kaggle/models/model_new_request.py deleted file mode 100644 index 767c4e7..0000000 --- a/kaggle/models/model_new_request.py +++ /dev/null @@ -1,330 +0,0 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -""" - Kaggle API - - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - - -import pprint -import re # noqa: F401 - -import six - - -class ModelNewRequest(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'owner_slug': 'str', - 'slug': 'str', - 'title': 'str', - 'subtitle': 'str', - 'is_private': 'bool', - 'description': 'str', - 'publish_time': 'date', - 'provenance_sources': 'str' - } - - attribute_map = { - 'owner_slug': 'ownerSlug', - 'slug': 'slug', - 'title': 'title', - 'subtitle': 'subtitle', - 'is_private': 'isPrivate', - 'description': 'description', - 'publish_time': 'publishTime', - 'provenance_sources': 'provenanceSources' - } - - def __init__(self, owner_slug=None, slug=None, title=None, subtitle=None, is_private=True, description='', publish_time=None, provenance_sources=''): # noqa: E501 - """ModelNewRequest - a model defined in Swagger""" # noqa: E501 - - self._owner_slug = None - self._slug = None - self._title = None - self._subtitle = None - self._is_private = None - self._description = None - self._publish_time = None - self._provenance_sources = None - self.discriminator = None - - self.owner_slug = owner_slug - self.slug = slug - self.title = title - if subtitle is not None: - self.subtitle = subtitle - self.is_private = is_private - if description is not None: - self.description = description - if publish_time is not None: - self.publish_time = publish_time - if provenance_sources is not None: - self.provenance_sources = provenance_sources - - @property - def owner_slug(self): - """Gets the owner_slug of this ModelNewRequest. # noqa: E501 - - The owner's slug # noqa: E501 - - :return: The owner_slug of this ModelNewRequest. # noqa: E501 - :rtype: str - """ - return self._owner_slug - - @owner_slug.setter - def owner_slug(self, owner_slug): - """Sets the owner_slug of this ModelNewRequest. - - The owner's slug # noqa: E501 - - :param owner_slug: The owner_slug of this ModelNewRequest. # noqa: E501 - :type: str - """ - if owner_slug is None: - raise ValueError("Invalid value for `owner_slug`, must not be `None`") # noqa: E501 - - self._owner_slug = owner_slug - - @property - def slug(self): - """Gets the slug of this ModelNewRequest. # noqa: E501 - - The slug that the model should be created with # noqa: E501 - - :return: The slug of this ModelNewRequest. # noqa: E501 - :rtype: str - """ - return self._slug - - @slug.setter - def slug(self, slug): - """Sets the slug of this ModelNewRequest. - - The slug that the model should be created with # noqa: E501 - - :param slug: The slug of this ModelNewRequest. # noqa: E501 - :type: str - """ - if slug is None: - raise ValueError("Invalid value for `slug`, must not be `None`") # noqa: E501 - - self._slug = slug - - @property - def title(self): - """Gets the title of this ModelNewRequest. # noqa: E501 - - The title of the new model # noqa: E501 - - :return: The title of this ModelNewRequest. # noqa: E501 - :rtype: str - """ - return self._title - - @title.setter - def title(self, title): - """Sets the title of this ModelNewRequest. - - The title of the new model # noqa: E501 - - :param title: The title of this ModelNewRequest. # noqa: E501 - :type: str - """ - if title is None: - raise ValueError("Invalid value for `title`, must not be `None`") # noqa: E501 - - self._title = title - - @property - def subtitle(self): - """Gets the subtitle of this ModelNewRequest. # noqa: E501 - - The subtitle of the new model # noqa: E501 - - :return: The subtitle of this ModelNewRequest. # noqa: E501 - :rtype: str - """ - return self._subtitle - - @subtitle.setter - def subtitle(self, subtitle): - """Sets the subtitle of this ModelNewRequest. 
- - The subtitle of the new model # noqa: E501 - - :param subtitle: The subtitle of this ModelNewRequest. # noqa: E501 - :type: str - """ - - self._subtitle = subtitle - - @property - def is_private(self): - """Gets the is_private of this ModelNewRequest. # noqa: E501 - - Whether or not the model should be private # noqa: E501 - - :return: The is_private of this ModelNewRequest. # noqa: E501 - :rtype: bool - """ - return self._is_private - - @is_private.setter - def is_private(self, is_private): - """Sets the is_private of this ModelNewRequest. - - Whether or not the model should be private # noqa: E501 - - :param is_private: The is_private of this ModelNewRequest. # noqa: E501 - :type: bool - """ - if is_private is None: - raise ValueError("Invalid value for `is_private`, must not be `None`") # noqa: E501 - - self._is_private = is_private - - @property - def description(self): - """Gets the description of this ModelNewRequest. # noqa: E501 - - The description to be set on the model # noqa: E501 - - :return: The description of this ModelNewRequest. # noqa: E501 - :rtype: str - """ - return self._description - - @description.setter - def description(self, description): - """Sets the description of this ModelNewRequest. - - The description to be set on the model # noqa: E501 - - :param description: The description of this ModelNewRequest. # noqa: E501 - :type: str - """ - - self._description = description - - @property - def publish_time(self): - """Gets the publish_time of this ModelNewRequest. # noqa: E501 - - When the model was initially published # noqa: E501 - - :return: The publish_time of this ModelNewRequest. # noqa: E501 - :rtype: date - """ - return self._publish_time - - @publish_time.setter - def publish_time(self, publish_time): - """Sets the publish_time of this ModelNewRequest. - - When the model was initially published # noqa: E501 - - :param publish_time: The publish_time of this ModelNewRequest. # noqa: E501 - :type: date - """ - - self._publish_time = publish_time - - @property - def provenance_sources(self): - """Gets the provenance_sources of this ModelNewRequest. # noqa: E501 - - The provenance sources to be set on the model # noqa: E501 - - :return: The provenance_sources of this ModelNewRequest. # noqa: E501 - :rtype: str - """ - return self._provenance_sources - - @provenance_sources.setter - def provenance_sources(self, provenance_sources): - """Sets the provenance_sources of this ModelNewRequest. - - The provenance sources to be set on the model # noqa: E501 - - :param provenance_sources: The provenance_sources of this ModelNewRequest. 
# noqa: E501 - :type: str - """ - - self._provenance_sources = provenance_sources - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ModelNewRequest): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/kaggle/models/model_update_request.py b/kaggle/models/model_update_request.py deleted file mode 100644 index fd4b969..0000000 --- a/kaggle/models/model_update_request.py +++ /dev/null @@ -1,298 +0,0 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -""" - Kaggle API - - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - - -import pprint -import re # noqa: F401 - -import six - - -class ModelUpdateRequest(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. 
- """ - swagger_types = { - 'title': 'str', - 'subtitle': 'str', - 'is_private': 'bool', - 'description': 'str', - 'publish_time': 'date', - 'provenance_sources': 'str', - 'update_mask': 'str' - } - - attribute_map = { - 'title': 'title', - 'subtitle': 'subtitle', - 'is_private': 'isPrivate', - 'description': 'description', - 'publish_time': 'publishTime', - 'provenance_sources': 'provenanceSources', - 'update_mask': 'updateMask' - } - - def __init__(self, title=None, subtitle=None, is_private=True, description='', publish_time=None, provenance_sources='', update_mask=None): # noqa: E501 - """ModelUpdateRequest - a model defined in Swagger""" # noqa: E501 - - self._title = None - self._subtitle = None - self._is_private = None - self._description = None - self._publish_time = None - self._provenance_sources = None - self._update_mask = None - self.discriminator = None - - if title is not None: - self.title = title - if subtitle is not None: - self.subtitle = subtitle - if is_private is not None: - self.is_private = is_private - if description is not None: - self.description = description - if publish_time is not None: - self.publish_time = publish_time - if provenance_sources is not None: - self.provenance_sources = provenance_sources - if update_mask is not None: - self.update_mask = update_mask - - @property - def title(self): - """Gets the title of this ModelUpdateRequest. # noqa: E501 - - The title of the new model # noqa: E501 - - :return: The title of this ModelUpdateRequest. # noqa: E501 - :rtype: str - """ - return self._title - - @title.setter - def title(self, title): - """Sets the title of this ModelUpdateRequest. - - The title of the new model # noqa: E501 - - :param title: The title of this ModelUpdateRequest. # noqa: E501 - :type: str - """ - - self._title = title - - @property - def subtitle(self): - """Gets the subtitle of this ModelUpdateRequest. # noqa: E501 - - The subtitle of the new model # noqa: E501 - - :return: The subtitle of this ModelUpdateRequest. # noqa: E501 - :rtype: str - """ - return self._subtitle - - @subtitle.setter - def subtitle(self, subtitle): - """Sets the subtitle of this ModelUpdateRequest. - - The subtitle of the new model # noqa: E501 - - :param subtitle: The subtitle of this ModelUpdateRequest. # noqa: E501 - :type: str - """ - - self._subtitle = subtitle - - @property - def is_private(self): - """Gets the is_private of this ModelUpdateRequest. # noqa: E501 - - Whether or not the model should be private # noqa: E501 - - :return: The is_private of this ModelUpdateRequest. # noqa: E501 - :rtype: bool - """ - return self._is_private - - @is_private.setter - def is_private(self, is_private): - """Sets the is_private of this ModelUpdateRequest. - - Whether or not the model should be private # noqa: E501 - - :param is_private: The is_private of this ModelUpdateRequest. # noqa: E501 - :type: bool - """ - - self._is_private = is_private - - @property - def description(self): - """Gets the description of this ModelUpdateRequest. # noqa: E501 - - The description to be set on the model # noqa: E501 - - :return: The description of this ModelUpdateRequest. # noqa: E501 - :rtype: str - """ - return self._description - - @description.setter - def description(self, description): - """Sets the description of this ModelUpdateRequest. - - The description to be set on the model # noqa: E501 - - :param description: The description of this ModelUpdateRequest. 
# noqa: E501 - :type: str - """ - - self._description = description - - @property - def publish_time(self): - """Gets the publish_time of this ModelUpdateRequest. # noqa: E501 - - When the model was initially published # noqa: E501 - - :return: The publish_time of this ModelUpdateRequest. # noqa: E501 - :rtype: date - """ - return self._publish_time - - @publish_time.setter - def publish_time(self, publish_time): - """Sets the publish_time of this ModelUpdateRequest. - - When the model was initially published # noqa: E501 - - :param publish_time: The publish_time of this ModelUpdateRequest. # noqa: E501 - :type: date - """ - - self._publish_time = publish_time - - @property - def provenance_sources(self): - """Gets the provenance_sources of this ModelUpdateRequest. # noqa: E501 - - The provenance sources to be set on the model # noqa: E501 - - :return: The provenance_sources of this ModelUpdateRequest. # noqa: E501 - :rtype: str - """ - return self._provenance_sources - - @provenance_sources.setter - def provenance_sources(self, provenance_sources): - """Sets the provenance_sources of this ModelUpdateRequest. - - The provenance sources to be set on the model # noqa: E501 - - :param provenance_sources: The provenance_sources of this ModelUpdateRequest. # noqa: E501 - :type: str - """ - - self._provenance_sources = provenance_sources - - @property - def update_mask(self): - """Gets the update_mask of this ModelUpdateRequest. # noqa: E501 - - Describes which fields to update # noqa: E501 - - :return: The update_mask of this ModelUpdateRequest. # noqa: E501 - :rtype: str - """ - return self._update_mask - - @update_mask.setter - def update_mask(self, update_mask): - """Sets the update_mask of this ModelUpdateRequest. - - Describes which fields to update # noqa: E501 - - :param update_mask: The update_mask of this ModelUpdateRequest. # noqa: E501 - :type: str - """ - - self._update_mask = update_mask - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, ModelUpdateRequest): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/kaggle/models/result.py b/kaggle/models/result.py deleted file mode 100644 index 3cf8bfe..0000000 --- a/kaggle/models/result.py +++ /dev/null @@ -1,100 +0,0 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -""" - Kaggle API - - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - - -import pprint -import re # noqa: F401 - -import six - - -class Result(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - - """ - Attributes: - swagger_types (dict): The key is attribute name - and the value is attribute type. - attribute_map (dict): The key is attribute name - and the value is json key in definition. - """ - swagger_types = { - } - - attribute_map = { - } - - def __init__(self): # noqa: E501 - """Result - a model defined in Swagger""" # noqa: E501 - self.discriminator = None - - def to_dict(self): - """Returns the model properties as a dict""" - result = {} - - for attr, _ in six.iteritems(self.swagger_types): - value = getattr(self, attr) - if isinstance(value, list): - result[attr] = list(map( - lambda x: x.to_dict() if hasattr(x, "to_dict") else x, - value - )) - elif hasattr(value, "to_dict"): - result[attr] = value.to_dict() - elif isinstance(value, dict): - result[attr] = dict(map( - lambda item: (item[0], item[1].to_dict()) - if hasattr(item[1], "to_dict") else item, - value.items() - )) - else: - result[attr] = value - - return result - - def to_str(self): - """Returns the string representation of the model""" - return pprint.pformat(self.to_dict()) - - def __repr__(self): - """For `print` and `pprint`""" - return self.to_str() - - def __eq__(self, other): - """Returns true if both objects are equal""" - if not isinstance(other, Result): - return False - - return self.__dict__ == other.__dict__ - - def __ne__(self, other): - """Returns true if both objects are not equal""" - return not self == other diff --git a/kaggle/models/upload_file.py b/kaggle/models/upload_file.py index 61a7d83..98aa8ec 100644 --- a/kaggle/models/upload_file.py +++ b/kaggle/models/upload_file.py @@ -1,31 +1,20 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# coding: utf-8 - -""" - Kaggle API - - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" +#!/usr/bin/python +# +# Copyright 2024 Kaggle Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# coding: utf-8 import pprint import re # noqa: F401 @@ -36,19 +25,14 @@ class UploadFile(object): - """NOTE: This class is auto generated by the swagger code generator program. - - Do not edit the class manually. - """ - """ Attributes: - swagger_types (dict): The key is attribute name + column_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ - swagger_types = { + column_types = { 'token': 'str', 'description': 'str', 'columns': 'list[DatasetColumn]' @@ -148,7 +132,7 @@ def to_dict(self): """Returns the model properties as a dict""" result = {} - for attr, _ in six.iteritems(self.swagger_types): + for attr, _ in six.iteritems(self.column_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( diff --git a/kaggle/rest.py b/kaggle/rest.py deleted file mode 100644 index d8e26ab..0000000 --- a/kaggle/rest.py +++ /dev/null @@ -1,336 +0,0 @@ -#!/usr/bin/python -# -# Copyright 2024 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# coding: utf-8 - -""" - Kaggle API - - API for kaggle.com # noqa: E501 - - OpenAPI spec version: 1 - - Generated by: https://github.com/swagger-api/swagger-codegen.git -""" - - -from __future__ import absolute_import - -import io -import json -import logging -import re -import ssl - -import certifi -# python 2 and python 3 compatibility library -import six -from six.moves.urllib.parse import urlencode - -try: - import urllib3 -except ImportError: - raise ImportError('Swagger python client requires urllib3.') - - -logger = logging.getLogger(__name__) - - -class RESTResponse(io.IOBase): - - def __init__(self, resp): - self.urllib3_response = resp - self.status = resp.status - self.reason = resp.reason - self.data = resp.data - - def getheaders(self): - """Returns a dictionary of the response headers.""" - return self.urllib3_response.getheaders() - - def getheader(self, name, default=None): - """Returns a given response header.""" - return self.urllib3_response.getheader(name, default) - - -class RESTClientObject(object): - - def __init__(self, configuration, pools_size=4, maxsize=None): - # urllib3.PoolManager will pass all kw parameters to connectionpool - # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 - # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501 - # maxsize is the number of requests to host that are allowed in parallel # noqa: E501 - # Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501 - - # cert_reqs - if configuration.verify_ssl: - cert_reqs = ssl.CERT_REQUIRED - else: - cert_reqs = ssl.CERT_NONE - - # ca_certs - if configuration.ssl_ca_cert: - ca_certs = configuration.ssl_ca_cert - else: - # if not set certificate file, use Mozilla's root certificates. - ca_certs = certifi.where() - - addition_pool_args = {} - if configuration.assert_hostname is not None: - addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501 - - if maxsize is None: - if configuration.connection_pool_maxsize is not None: - maxsize = configuration.connection_pool_maxsize - else: - maxsize = 4 - - # https pool manager - if configuration.proxy: - self.pool_manager = urllib3.ProxyManager( - num_pools=pools_size, - maxsize=maxsize, - cert_reqs=cert_reqs, - ca_certs=ca_certs, - cert_file=configuration.cert_file, - key_file=configuration.key_file, - proxy_url=configuration.proxy, - **addition_pool_args - ) - else: - self.pool_manager = urllib3.PoolManager( - num_pools=pools_size, - maxsize=maxsize, - cert_reqs=cert_reqs, - ca_certs=ca_certs, - cert_file=configuration.cert_file, - key_file=configuration.key_file, - **addition_pool_args - ) - - def request(self, method, url, query_params=None, headers=None, - body=None, post_params=None, _preload_content=True, - _request_timeout=None): - """Perform requests. - - :param method: http request method - :param url: http request url - :param query_params: query parameters in the url - :param headers: http request headers - :param body: request json body, for `application/json` - :param post_params: request post parameters, - `application/x-www-form-urlencoded` - and `multipart/form-data` - :param _preload_content: if False, the urllib3.HTTPResponse object will - be returned without reading/decoding response - data. Default is True. - :param _request_timeout: timeout setting for this request. 
If one - number provided, it will be total request - timeout. It can also be a pair (tuple) of - (connection, read) timeouts. - """ - method = method.upper() - assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT', - 'PATCH', 'OPTIONS'] - - if post_params and body: - raise ValueError( - "body parameter cannot be used with post_params parameter." - ) - - post_params = post_params or {} - headers = headers or {} - - timeout = None - if _request_timeout: - if isinstance(_request_timeout, (int, ) if six.PY3 else (int, long)): # noqa: E501,F821 - timeout = urllib3.Timeout(total=_request_timeout) - elif (isinstance(_request_timeout, tuple) and - len(_request_timeout) == 2): - timeout = urllib3.Timeout( - connect=_request_timeout[0], read=_request_timeout[1]) - - try: - # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE` - if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']: - if query_params: - url += '?' + urlencode(query_params) - if re.search('json', headers['Content-Type'], re.IGNORECASE): - request_body = None - if body is not None: - request_body = json.dumps(body) - r = self.pool_manager.request( - method, url, - body=request_body, - preload_content=_preload_content, - timeout=timeout, - headers=headers) - elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501 - r = self.pool_manager.request( - method, url, - fields=post_params, - encode_multipart=False, - preload_content=_preload_content, - timeout=timeout, - headers=headers) - elif headers['Content-Type'] == 'multipart/form-data': - # must del headers['Content-Type'], or the correct - # Content-Type which generated by urllib3 will be - # overwritten. - del headers['Content-Type'] - r = self.pool_manager.request( - method, url, - fields=post_params, - encode_multipart=True, - preload_content=_preload_content, - timeout=timeout, - headers=headers) - # Pass a `string` parameter directly in the body to support - # other content types than Json when `body` argument is - # provided in serialized form - elif isinstance(body, str): - request_body = body - r = self.pool_manager.request( - method, url, - body=request_body, - preload_content=_preload_content, - timeout=timeout, - headers=headers) - else: - # Cannot generate the request from given parameters - msg = """Cannot prepare a request message for provided - arguments. Please check that your arguments match - declared content type.""" - raise ApiException(status=0, reason=msg) - # For `GET`, `HEAD` - else: - r = self.pool_manager.request(method, url, - fields=query_params, - preload_content=_preload_content, - timeout=timeout, - headers=headers) - except urllib3.exceptions.SSLError as e: - msg = "{0}\n{1}".format(type(e).__name__, str(e)) - raise ApiException(status=0, reason=msg) - - if _preload_content: - r = RESTResponse(r) - - # In the python 3, the response.data is bytes. - # we need to decode it to string. 
- if six.PY3: - r.data = r.data.decode('utf8') - - # log response body - logger.debug("response body: %s", r.data) - - if not 200 <= r.status <= 299: - raise ApiException(http_resp=r) - - return r - - def GET(self, url, headers=None, query_params=None, _preload_content=True, - _request_timeout=None): - return self.request("GET", url, - headers=headers, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - query_params=query_params) - - def HEAD(self, url, headers=None, query_params=None, _preload_content=True, - _request_timeout=None): - return self.request("HEAD", url, - headers=headers, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - query_params=query_params) - - def OPTIONS(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("OPTIONS", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def DELETE(self, url, headers=None, query_params=None, body=None, - _preload_content=True, _request_timeout=None): - return self.request("DELETE", url, - headers=headers, - query_params=query_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def POST(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("POST", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def PUT(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("PUT", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - def PATCH(self, url, headers=None, query_params=None, post_params=None, - body=None, _preload_content=True, _request_timeout=None): - return self.request("PATCH", url, - headers=headers, - query_params=query_params, - post_params=post_params, - _preload_content=_preload_content, - _request_timeout=_request_timeout, - body=body) - - -class ApiException(Exception): - - def __init__(self, status=None, reason=None, http_resp=None): - if http_resp: - self.status = http_resp.status - self.reason = http_resp.reason - self.body = http_resp.data - self.headers = http_resp.getheaders() - else: - self.status = status - self.reason = reason - self.body = None - self.headers = None - - def __str__(self): - """Custom error messages for exception""" - error_message = "({0})\n"\ - "Reason: {1}\n".format(self.status, self.reason) - if self.headers: - error_message += "HTTP response headers: {0}\n".format( - self.headers) - - if self.body: - error_message += "HTTP response body: {0}\n".format(self.body) - - return error_message diff --git a/kagglesdk/admin/__init__.py b/kagglesdk/admin/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kagglesdk/admin/services/__init__.py b/kagglesdk/admin/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kagglesdk/admin/services/inbox_file_service.py b/kagglesdk/admin/services/inbox_file_service.py new file mode 100644 index 0000000..6a83bf5 --- /dev/null +++ b/kagglesdk/admin/services/inbox_file_service.py @@ -0,0 +1,22 @@ +from 
kagglesdk.admin.types.inbox_file_service import CreateInboxFileRequest, CreateInboxFileResponse +from kagglesdk.kaggle_http_client import KaggleHttpClient + +class InboxFileClient(object): + """File drop/pickup functionality.""" + + def __init__(self, client: KaggleHttpClient): + self._client = client + + def create_inbox_file(self, request: CreateInboxFileRequest = None) -> CreateInboxFileResponse: + r""" + Creates (aka 'drops') a new file into the inbox. + + Args: + request (CreateInboxFileRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = CreateInboxFileRequest() + + return self._client.call("admin.InboxFileService", "CreateInboxFile", request, CreateInboxFileResponse) diff --git a/kagglesdk/admin/types/__init__.py b/kagglesdk/admin/types/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kagglesdk/admin/types/inbox_file_service.py b/kagglesdk/admin/types/inbox_file_service.py new file mode 100644 index 0000000..3dcb8f6 --- /dev/null +++ b/kagglesdk/admin/types/inbox_file_service.py @@ -0,0 +1,74 @@ +from kagglesdk.kaggle_object import * + +class CreateInboxFileRequest(KaggleObject): + r""" + Attributes: + virtual_directory (str) + Directory name used for tagging the uploaded file. + blob_file_token (str) + Token representing the uploaded file. + """ + + def __init__(self): + self._virtual_directory = "" + self._blob_file_token = "" + self._freeze() + + @property + def virtual_directory(self) -> str: + """Directory name used for tagging the uploaded file.""" + return self._virtual_directory + + @virtual_directory.setter + def virtual_directory(self, virtual_directory: str): + if virtual_directory is None: + del self.virtual_directory + return + if not isinstance(virtual_directory, str): + raise TypeError('virtual_directory must be of type str') + self._virtual_directory = virtual_directory + + @property + def blob_file_token(self) -> str: + """Token representing the uploaded file.""" + return self._blob_file_token + + @blob_file_token.setter + def blob_file_token(self, blob_file_token: str): + if blob_file_token is None: + del self.blob_file_token + return + if not isinstance(blob_file_token, str): + raise TypeError('blob_file_token must be of type str') + self._blob_file_token = blob_file_token + + + def endpoint(self): + path = '/api/v1/inbox/files/create' + return path.format_map(self.to_field_map(self)) + + + @staticmethod + def method(): + return 'POST' + + @staticmethod + def body_fields(): + return '*' + +class CreateInboxFileResponse(KaggleObject): + r""" + NOTE: This is sent to non-admins, so we're intentionally *NOT* sending back + the full InboxFile (with its URL for a direct download). 
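
A minimal usage sketch of the drop flow the two new services enable (the helper name and virtual directory value are illustrative, client authentication and the actual byte upload to create_url are assumed to happen elsewhere):

from kagglesdk.admin.services.inbox_file_service import InboxFileClient
from kagglesdk.admin.types.inbox_file_service import CreateInboxFileRequest
from kagglesdk.blobs.services.blob_api_service import BlobApiClient
from kagglesdk.blobs.types.blob_api_service import ApiBlobType, ApiStartBlobUploadRequest

def drop_file_into_inbox(http_client, name, size):
    # Reserve a spot on GCS for the upload, using the new INBOX blob type.
    start = ApiStartBlobUploadRequest()
    start.type = ApiBlobType.INBOX
    start.name = name
    start.content_length = size
    started = BlobApiClient(http_client).start_blob_upload(start)
    # ... the caller uploads the file bytes to started.create_url here (elided) ...
    # Register the uploaded blob in the inbox using the returned token.
    create = CreateInboxFileRequest()
    create.virtual_directory = 'inbox-demo'  # hypothetical directory tag
    create.blob_file_token = started.token
    return InboxFileClient(http_client).create_inbox_file(create)
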
+ + """ + + pass + +CreateInboxFileRequest._fields = [ + FieldMetadata("virtualDirectory", "virtual_directory", "_virtual_directory", str, "", PredefinedSerializer()), + FieldMetadata("blobFileToken", "blob_file_token", "_blob_file_token", str, "", PredefinedSerializer()), +] + +CreateInboxFileResponse._fields = [] + diff --git a/kagglesdk/blobs/__init__.py b/kagglesdk/blobs/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kagglesdk/blobs/services/__init__.py b/kagglesdk/blobs/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kagglesdk/blobs/services/blob_api_service.py b/kagglesdk/blobs/services/blob_api_service.py new file mode 100644 index 0000000..b05ec57 --- /dev/null +++ b/kagglesdk/blobs/services/blob_api_service.py @@ -0,0 +1,25 @@ +from kagglesdk.blobs.types.blob_api_service import ApiStartBlobUploadRequest, ApiStartBlobUploadResponse +from kagglesdk.kaggle_http_client import KaggleHttpClient + +class BlobApiClient(object): + r""" + Binary Large OBject (BLOB) service used for uploading files to Google Cloud + Storage (GCS). + """ + + def __init__(self, client: KaggleHttpClient): + self._client = client + + def start_blob_upload(self, request: ApiStartBlobUploadRequest = None) -> ApiStartBlobUploadResponse: + r""" + Starts a blob upload (i.e. reserves a spot for the upload on GCS). + + Args: + request (ApiStartBlobUploadRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiStartBlobUploadRequest() + + return self._client.call("blobs.BlobApiService", "ApiStartBlobUpload", request, ApiStartBlobUploadResponse) diff --git a/kagglesdk/blobs/types/__init__.py b/kagglesdk/blobs/types/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kagglesdk/blobs/types/blob_api_service.py b/kagglesdk/blobs/types/blob_api_service.py new file mode 100644 index 0000000..c691a3c --- /dev/null +++ b/kagglesdk/blobs/types/blob_api_service.py @@ -0,0 +1,173 @@ +import enum +from kagglesdk.kaggle_object import * +from typing import Optional + +class ApiBlobType(enum.Enum): + API_BLOB_TYPE_UNSPECIFIED = 0 + DATASET = 1 + MODEL = 2 + INBOX = 3 + +class ApiStartBlobUploadRequest(KaggleObject): + r""" + Attributes: + type (ApiBlobType) + The type of the blob. + name (str) + Name (e.g. file name) of the blob. + content_type (str) + Content/MIME type (e.g. 'text/plain'). + content_length (int) + Size in bytes of the blob. + last_modified_epoch_seconds (int) + Optional user-reported time when the blob was last updated/modified. + """ + + def __init__(self): + self._type = ApiBlobType.API_BLOB_TYPE_UNSPECIFIED + self._name = "" + self._content_type = None + self._content_length = 0 + self._last_modified_epoch_seconds = None + self._freeze() + + @property + def type(self) -> 'ApiBlobType': + """The type of the blob.""" + return self._type + + @type.setter + def type(self, type: 'ApiBlobType'): + if type is None: + del self.type + return + if not isinstance(type, ApiBlobType): + raise TypeError('type must be of type ApiBlobType') + self._type = type + + @property + def name(self) -> str: + """Name (e.g. file name) of the blob.""" + return self._name + + @name.setter + def name(self, name: str): + if name is None: + del self.name + return + if not isinstance(name, str): + raise TypeError('name must be of type str') + self._name = name + + @property + def content_type(self) -> str: + """Content/MIME type (e.g. 
'text/plain').""" + return self._content_type or "" + + @content_type.setter + def content_type(self, content_type: str): + if content_type is None: + del self.content_type + return + if not isinstance(content_type, str): + raise TypeError('content_type must be of type str') + self._content_type = content_type + + @property + def content_length(self) -> int: + """Size in bytes of the blob.""" + return self._content_length + + @content_length.setter + def content_length(self, content_length: int): + if content_length is None: + del self.content_length + return + if not isinstance(content_length, int): + raise TypeError('content_length must be of type int') + self._content_length = content_length + + @property + def last_modified_epoch_seconds(self) -> int: + """Optional user-reported time when the blob was last updated/modified.""" + return self._last_modified_epoch_seconds or 0 + + @last_modified_epoch_seconds.setter + def last_modified_epoch_seconds(self, last_modified_epoch_seconds: int): + if last_modified_epoch_seconds is None: + del self.last_modified_epoch_seconds + return + if not isinstance(last_modified_epoch_seconds, int): + raise TypeError('last_modified_epoch_seconds must be of type int') + self._last_modified_epoch_seconds = last_modified_epoch_seconds + + + def endpoint(self): + path = '/api/v1/blobs/upload' + return path.format_map(self.to_field_map(self)) + + + @staticmethod + def method(): + return 'POST' + + @staticmethod + def body_fields(): + return '*' + +class ApiStartBlobUploadResponse(KaggleObject): + r""" + Attributes: + token (str) + Opaque string token used to reference the new blob/file. + create_url (str) + URL to use to start the upload. + """ + + def __init__(self): + self._token = "" + self._create_url = "" + self._freeze() + + @property + def token(self) -> str: + """Opaque string token used to reference the new blob/file.""" + return self._token + + @token.setter + def token(self, token: str): + if token is None: + del self.token + return + if not isinstance(token, str): + raise TypeError('token must be of type str') + self._token = token + + @property + def create_url(self) -> str: + """URL to use to start the upload.""" + return self._create_url + + @create_url.setter + def create_url(self, create_url: str): + if create_url is None: + del self.create_url + return + if not isinstance(create_url, str): + raise TypeError('create_url must be of type str') + self._create_url = create_url + + +ApiStartBlobUploadRequest._fields = [ + FieldMetadata("type", "type", "_type", ApiBlobType, ApiBlobType.API_BLOB_TYPE_UNSPECIFIED, EnumSerializer()), + FieldMetadata("name", "name", "_name", str, "", PredefinedSerializer()), + FieldMetadata("contentType", "content_type", "_content_type", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("contentLength", "content_length", "_content_length", int, 0, PredefinedSerializer()), + FieldMetadata("lastModifiedEpochSeconds", "last_modified_epoch_seconds", "_last_modified_epoch_seconds", int, None, PredefinedSerializer(), optional=True), +] + +ApiStartBlobUploadResponse._fields = [ + FieldMetadata("token", "token", "_token", str, "", PredefinedSerializer()), + FieldMetadata("createUrl", "create_url", "_create_url", str, "", PredefinedSerializer()), +] + diff --git a/kagglesdk/datasets/types/dataset_types.py b/kagglesdk/datasets/types/dataset_types.py index f3938c0..36016fc 100644 --- a/kagglesdk/datasets/types/dataset_types.py +++ b/kagglesdk/datasets/types/dataset_types.py @@ -540,17 +540,19 @@ class 
DatasetCollaborator(KaggleObject): r""" Attributes: username (str) + group_slug (str) role (CollaboratorType) """ def __init__(self): - self._username = "" + self._username = None + self._group_slug = None self._role = CollaboratorType.COLLABORATOR_TYPE_UNSPECIFIED self._freeze() @property def username(self) -> str: - return self._username + return self._username or "" @username.setter def username(self, username: str): @@ -559,8 +561,23 @@ def username(self, username: str): return if not isinstance(username, str): raise TypeError('username must be of type str') + del self.group_slug self._username = username + @property + def group_slug(self) -> str: + return self._group_slug or "" + + @group_slug.setter + def group_slug(self, group_slug: str): + if group_slug is None: + del self.group_slug + return + if not isinstance(group_slug, str): + raise TypeError('group_slug must be of type str') + del self.username + self._group_slug = group_slug + @property def role(self) -> 'CollaboratorType': return self._role @@ -622,7 +639,8 @@ def role(self, role: 'CollaboratorType'): ] DatasetCollaborator._fields = [ - FieldMetadata("username", "username", "_username", str, "", PredefinedSerializer()), + FieldMetadata("username", "username", "_username", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("groupSlug", "group_slug", "_group_slug", str, None, PredefinedSerializer(), optional=True), FieldMetadata("role", "role", "_role", CollaboratorType, CollaboratorType.COLLABORATOR_TYPE_UNSPECIFIED, EnumSerializer()), ] diff --git a/kagglesdk/education/__init__.py b/kagglesdk/education/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kagglesdk/education/services/__init__.py b/kagglesdk/education/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kagglesdk/education/services/education_api_service.py b/kagglesdk/education/services/education_api_service.py new file mode 100644 index 0000000..4697ea8 --- /dev/null +++ b/kagglesdk/education/services/education_api_service.py @@ -0,0 +1,19 @@ +from kagglesdk.education.types.education_api_service import ApiTrackExerciseInteractionRequest, ApiTrackExerciseInteractionResponse +from kagglesdk.kaggle_http_client import KaggleHttpClient + +class EducationApiClient(object): + + def __init__(self, client: KaggleHttpClient): + self._client = client + + def track_exercise_interaction(self, request: ApiTrackExerciseInteractionRequest = None) -> ApiTrackExerciseInteractionResponse: + r""" + Args: + request (ApiTrackExerciseInteractionRequest): + The request object; initialized to empty instance if not specified. 
+ """ + + if request is None: + request = ApiTrackExerciseInteractionRequest() + + return self._client.call("education.EducationApiService", "ApiTrackExerciseInteraction", request, ApiTrackExerciseInteractionResponse) diff --git a/kagglesdk/education/types/__init__.py b/kagglesdk/education/types/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kagglesdk/education/types/education_api_service.py b/kagglesdk/education/types/education_api_service.py new file mode 100644 index 0000000..79c4e90 --- /dev/null +++ b/kagglesdk/education/types/education_api_service.py @@ -0,0 +1,244 @@ +from kagglesdk.education.types.education_service import LearnExerciseInteractionType, LearnExerciseOutcomeType, LearnExerciseQuestionType, LearnNudge +from kagglesdk.kaggle_object import * +from typing import Optional + +class ApiTrackExerciseInteractionRequest(KaggleObject): + r""" + This is copied from TrackExerciseInteractionRequest in + education_service.proto, which will eventually be deprecated. In the + meantime, make sure to keep these in sync. + + NOTE: there's one small rename from `fork_parent_script_version_id` to + `fork_parent_kernel_session_id`. + + Attributes: + exception_class (str) + failure_message (str) + interaction_type (LearnExerciseInteractionType) + learn_tools_version (str) + fork_parent_kernel_session_id (int) + outcome_type (LearnExerciseOutcomeType) + question_id (str) + question_type (LearnExerciseQuestionType) + trace (str) + value_towards_completion (float) + """ + + def __init__(self): + self._exception_class = "" + self._failure_message = "" + self._interaction_type = LearnExerciseInteractionType.LEARN_EXERCISE_INTERACTION_TYPE_UNSPECIFIED + self._learn_tools_version = "" + self._fork_parent_kernel_session_id = 0 + self._outcome_type = LearnExerciseOutcomeType.LEARN_EXERCISE_OUTCOME_TYPE_UNSPECIFIED + self._question_id = "" + self._question_type = LearnExerciseQuestionType.LEARN_EXERCISE_QUESTION_TYPE_UNSPECIFIED + self._trace = "" + self._value_towards_completion = None + self._freeze() + + @property + def exception_class(self) -> str: + return self._exception_class + + @exception_class.setter + def exception_class(self, exception_class: str): + if exception_class is None: + del self.exception_class + return + if not isinstance(exception_class, str): + raise TypeError('exception_class must be of type str') + self._exception_class = exception_class + + @property + def failure_message(self) -> str: + return self._failure_message + + @failure_message.setter + def failure_message(self, failure_message: str): + if failure_message is None: + del self.failure_message + return + if not isinstance(failure_message, str): + raise TypeError('failure_message must be of type str') + self._failure_message = failure_message + + @property + def interaction_type(self) -> 'LearnExerciseInteractionType': + return self._interaction_type + + @interaction_type.setter + def interaction_type(self, interaction_type: 'LearnExerciseInteractionType'): + if interaction_type is None: + del self.interaction_type + return + if not isinstance(interaction_type, LearnExerciseInteractionType): + raise TypeError('interaction_type must be of type LearnExerciseInteractionType') + self._interaction_type = interaction_type + + @property + def learn_tools_version(self) -> str: + return self._learn_tools_version + + @learn_tools_version.setter + def learn_tools_version(self, learn_tools_version: str): + if learn_tools_version is None: + del self.learn_tools_version + return + if not 
isinstance(learn_tools_version, str): + raise TypeError('learn_tools_version must be of type str') + self._learn_tools_version = learn_tools_version + + @property + def fork_parent_kernel_session_id(self) -> int: + return self._fork_parent_kernel_session_id + + @fork_parent_kernel_session_id.setter + def fork_parent_kernel_session_id(self, fork_parent_kernel_session_id: int): + if fork_parent_kernel_session_id is None: + del self.fork_parent_kernel_session_id + return + if not isinstance(fork_parent_kernel_session_id, int): + raise TypeError('fork_parent_kernel_session_id must be of type int') + self._fork_parent_kernel_session_id = fork_parent_kernel_session_id + + @property + def outcome_type(self) -> 'LearnExerciseOutcomeType': + return self._outcome_type + + @outcome_type.setter + def outcome_type(self, outcome_type: 'LearnExerciseOutcomeType'): + if outcome_type is None: + del self.outcome_type + return + if not isinstance(outcome_type, LearnExerciseOutcomeType): + raise TypeError('outcome_type must be of type LearnExerciseOutcomeType') + self._outcome_type = outcome_type + + @property + def question_id(self) -> str: + return self._question_id + + @question_id.setter + def question_id(self, question_id: str): + if question_id is None: + del self.question_id + return + if not isinstance(question_id, str): + raise TypeError('question_id must be of type str') + self._question_id = question_id + + @property + def question_type(self) -> 'LearnExerciseQuestionType': + return self._question_type + + @question_type.setter + def question_type(self, question_type: 'LearnExerciseQuestionType'): + if question_type is None: + del self.question_type + return + if not isinstance(question_type, LearnExerciseQuestionType): + raise TypeError('question_type must be of type LearnExerciseQuestionType') + self._question_type = question_type + + @property + def trace(self) -> str: + return self._trace + + @trace.setter + def trace(self, trace: str): + if trace is None: + del self.trace + return + if not isinstance(trace, str): + raise TypeError('trace must be of type str') + self._trace = trace + + @property + def value_towards_completion(self) -> float: + return self._value_towards_completion or 0.0 + + @value_towards_completion.setter + def value_towards_completion(self, value_towards_completion: float): + if value_towards_completion is None: + del self.value_towards_completion + return + if not isinstance(value_towards_completion, float): + raise TypeError('value_towards_completion must be of type float') + self._value_towards_completion = value_towards_completion + + + def endpoint(self): + path = '/api/v1/learn/track' + return path.format_map(self.to_field_map(self)) + + + @staticmethod + def method(): + return 'POST' + + @staticmethod + def body_fields(): + return '*' + +class ApiTrackExerciseInteractionResponse(KaggleObject): + r""" + This is copied from TrackExerciseInteractionResponse in + education_service.proto, which will eventually be deprecated. In the + meantime, make sure to keep these in sync. 
+ + Attributes: + nudge (LearnNudge) + show_login_prompt (bool) + """ + + def __init__(self): + self._nudge = None + self._show_login_prompt = False + self._freeze() + + @property + def nudge(self) -> Optional['LearnNudge']: + return self._nudge + + @nudge.setter + def nudge(self, nudge: Optional['LearnNudge']): + if nudge is None: + del self.nudge + return + if not isinstance(nudge, LearnNudge): + raise TypeError('nudge must be of type LearnNudge') + self._nudge = nudge + + @property + def show_login_prompt(self) -> bool: + return self._show_login_prompt + + @show_login_prompt.setter + def show_login_prompt(self, show_login_prompt: bool): + if show_login_prompt is None: + del self.show_login_prompt + return + if not isinstance(show_login_prompt, bool): + raise TypeError('show_login_prompt must be of type bool') + self._show_login_prompt = show_login_prompt + + +ApiTrackExerciseInteractionRequest._fields = [ + FieldMetadata("exceptionClass", "exception_class", "_exception_class", str, "", PredefinedSerializer()), + FieldMetadata("failureMessage", "failure_message", "_failure_message", str, "", PredefinedSerializer()), + FieldMetadata("interactionType", "interaction_type", "_interaction_type", LearnExerciseInteractionType, LearnExerciseInteractionType.LEARN_EXERCISE_INTERACTION_TYPE_UNSPECIFIED, EnumSerializer()), + FieldMetadata("learnToolsVersion", "learn_tools_version", "_learn_tools_version", str, "", PredefinedSerializer()), + FieldMetadata("forkParentKernelSessionId", "fork_parent_kernel_session_id", "_fork_parent_kernel_session_id", int, 0, PredefinedSerializer()), + FieldMetadata("outcomeType", "outcome_type", "_outcome_type", LearnExerciseOutcomeType, LearnExerciseOutcomeType.LEARN_EXERCISE_OUTCOME_TYPE_UNSPECIFIED, EnumSerializer()), + FieldMetadata("questionId", "question_id", "_question_id", str, "", PredefinedSerializer()), + FieldMetadata("questionType", "question_type", "_question_type", LearnExerciseQuestionType, LearnExerciseQuestionType.LEARN_EXERCISE_QUESTION_TYPE_UNSPECIFIED, EnumSerializer()), + FieldMetadata("trace", "trace", "_trace", str, "", PredefinedSerializer()), + FieldMetadata("valueTowardsCompletion", "value_towards_completion", "_value_towards_completion", float, None, PredefinedSerializer(), optional=True), +] + +ApiTrackExerciseInteractionResponse._fields = [ + FieldMetadata("nudge", "nudge", "_nudge", LearnNudge, None, KaggleObjectSerializer()), + FieldMetadata("showLoginPrompt", "show_login_prompt", "_show_login_prompt", bool, False, PredefinedSerializer()), +] + diff --git a/kagglesdk/education/types/education_service.py b/kagglesdk/education/types/education_service.py new file mode 100644 index 0000000..4f5f2ad --- /dev/null +++ b/kagglesdk/education/types/education_service.py @@ -0,0 +1,139 @@ +import enum +from kagglesdk.kaggle_object import * +from typing import Optional + +class LearnExerciseInteractionType(enum.Enum): + LEARN_EXERCISE_INTERACTION_TYPE_UNSPECIFIED = 0 + CHECK = 1 + HINT = 2 + SOLUTION = 3 + +class LearnExerciseOutcomeType(enum.Enum): + LEARN_EXERCISE_OUTCOME_TYPE_UNSPECIFIED = 0 + PASS = 1 + FAIL = 2 + EXCEPTION = 3 + UNATTEMPTED = 4 + +class LearnExerciseQuestionType(enum.Enum): + LEARN_EXERCISE_QUESTION_TYPE_UNSPECIFIED = 0 + EQUALITY_CHECK_PROBLEM = 1 + CODING_PROBLEM = 2 + FUNCTION_PROBLEM = 3 + THOUGHT_EXPERIMENT = 4 + +class LearnNudgeType(enum.Enum): + COURSE_COMPLETE_NO_BONUS_LESSONS = 0 + COURSE_COMPLETE_WITH_BONUS_LESSONS = 1 + COURSE_INCOMPLETE = 2 + DO_EXERCISE = 3 + DO_TUTORIAL = 4 + +class LearnNudge(KaggleObject): + 
r""" + Attributes: + course_index (int) + course_name (str) + course_slug (str) + next_item_name (str) + next_item_url (str) + next_item_type (LearnNudgeType) + """ + + def __init__(self): + self._course_index = 0 + self._course_name = "" + self._course_slug = "" + self._next_item_name = "" + self._next_item_url = "" + self._next_item_type = LearnNudgeType.COURSE_COMPLETE_NO_BONUS_LESSONS + self._freeze() + + @property + def course_index(self) -> int: + return self._course_index + + @course_index.setter + def course_index(self, course_index: int): + if course_index is None: + del self.course_index + return + if not isinstance(course_index, int): + raise TypeError('course_index must be of type int') + self._course_index = course_index + + @property + def course_name(self) -> str: + return self._course_name + + @course_name.setter + def course_name(self, course_name: str): + if course_name is None: + del self.course_name + return + if not isinstance(course_name, str): + raise TypeError('course_name must be of type str') + self._course_name = course_name + + @property + def course_slug(self) -> str: + return self._course_slug + + @course_slug.setter + def course_slug(self, course_slug: str): + if course_slug is None: + del self.course_slug + return + if not isinstance(course_slug, str): + raise TypeError('course_slug must be of type str') + self._course_slug = course_slug + + @property + def next_item_name(self) -> str: + return self._next_item_name + + @next_item_name.setter + def next_item_name(self, next_item_name: str): + if next_item_name is None: + del self.next_item_name + return + if not isinstance(next_item_name, str): + raise TypeError('next_item_name must be of type str') + self._next_item_name = next_item_name + + @property + def next_item_url(self) -> str: + return self._next_item_url + + @next_item_url.setter + def next_item_url(self, next_item_url: str): + if next_item_url is None: + del self.next_item_url + return + if not isinstance(next_item_url, str): + raise TypeError('next_item_url must be of type str') + self._next_item_url = next_item_url + + @property + def next_item_type(self) -> 'LearnNudgeType': + return self._next_item_type + + @next_item_type.setter + def next_item_type(self, next_item_type: 'LearnNudgeType'): + if next_item_type is None: + del self.next_item_type + return + if not isinstance(next_item_type, LearnNudgeType): + raise TypeError('next_item_type must be of type LearnNudgeType') + self._next_item_type = next_item_type + + +LearnNudge._fields = [ + FieldMetadata("courseIndex", "course_index", "_course_index", int, 0, PredefinedSerializer()), + FieldMetadata("courseName", "course_name", "_course_name", str, "", PredefinedSerializer()), + FieldMetadata("courseSlug", "course_slug", "_course_slug", str, "", PredefinedSerializer()), + FieldMetadata("nextItemName", "next_item_name", "_next_item_name", str, "", PredefinedSerializer()), + FieldMetadata("nextItemUrl", "next_item_url", "_next_item_url", str, "", PredefinedSerializer()), + FieldMetadata("nextItemType", "next_item_type", "_next_item_type", LearnNudgeType, LearnNudgeType.COURSE_COMPLETE_NO_BONUS_LESSONS, EnumSerializer()), +] + diff --git a/kagglesdk/kaggle_client.py b/kagglesdk/kaggle_client.py index 517c6b9..c4ccf19 100644 --- a/kagglesdk/kaggle_client.py +++ b/kagglesdk/kaggle_client.py @@ -1,8 +1,11 @@ from kagglesdk.kernels.services.kernels_api_service import KernelsApiClient +from kagglesdk.blobs.services.blob_api_service import BlobApiClient +from 
kagglesdk.education.services.education_api_service import EducationApiClient from kagglesdk.models.services.model_api_service import ModelApiClient from kagglesdk.models.services.model_service import ModelClient from kagglesdk.competitions.services.competition_api_service import CompetitionApiClient from kagglesdk.datasets.services.dataset_api_service import DatasetApiClient +from kagglesdk.admin.services.inbox_file_service import InboxFileClient from kagglesdk.kaggle_env import KaggleEnv from kagglesdk.kaggle_http_client import KaggleHttpClient @@ -12,6 +15,14 @@ class Kernels(object): def __init__(self, http_client: KaggleHttpClient): self.kernels_api_client = KernelsApiClient(http_client) + class Blobs(object): + def __init__(self, http_client: KaggleHttpClient): + self.blob_api_client = BlobApiClient(http_client) + + class Education(object): + def __init__(self, http_client: KaggleHttpClient): + self.education_api_client = EducationApiClient(http_client) + class Models(object): def __init__(self, http_client: KaggleHttpClient): self.model_api_client = ModelApiClient(http_client) @@ -25,15 +36,25 @@ class Datasets(object): def __init__(self, http_client: KaggleHttpClient): self.dataset_api_client = DatasetApiClient(http_client) + class Admin(object): + def __init__(self, http_client: KaggleHttpClient): + self.inbox_file_client = InboxFileClient(http_client) + def __init__(self, env: KaggleEnv = None, verbose: bool = False, username: str = None, password: str = None): self._http_client = http_client = KaggleHttpClient(env, verbose, self._renew_iap_token, username=username, password=password) self.kernels = KaggleClient.Kernels(http_client) + self.blobs = KaggleClient.Blobs(http_client) + self.education = KaggleClient.Education(http_client) self.models = KaggleClient.Models(http_client) self.competitions = KaggleClient.Competitions(http_client) self.datasets = KaggleClient.Datasets(http_client) + self.admin = KaggleClient.Admin(http_client) self.username = username self.password = password + def http_client(self): + return self._http_client + def _renew_iap_token(self): return self.admin.admin_client.renew_iap_token() diff --git a/kagglesdk/kaggle_object.py b/kagglesdk/kaggle_object.py index 7d7d5f0..31d9f2a 100644 --- a/kagglesdk/kaggle_object.py +++ b/kagglesdk/kaggle_object.py @@ -1,4 +1,3 @@ -import enum import json import re from datetime import datetime, timedelta @@ -242,7 +241,6 @@ def __setattr__(self, key, value): raise AttributeError(f'Unknown field for {self.__class__.__name__}: {key}') object.__setattr__(self, key, value) - @staticmethod def to_dict(self, ignore_defaults=True): kv_pairs = [(field.json_name, field.get_as_dict_item(self, ignore_defaults)) for field in self._fields] return {k: v for (k, v) in kv_pairs if not ignore_defaults or v is not None} diff --git a/kagglesdk/kernels/services/kernels_api_service.py b/kagglesdk/kernels/services/kernels_api_service.py index dbccb8e..485b210 100644 --- a/kagglesdk/kernels/services/kernels_api_service.py +++ b/kagglesdk/kernels/services/kernels_api_service.py @@ -1,5 +1,6 @@ +from kagglesdk.common.types.http_redirect import HttpRedirect from kagglesdk.kaggle_http_client import KaggleHttpClient -from kagglesdk.kernels.types.kernels_api_service import ApiGetKernelRequest, ApiGetKernelResponse, ApiGetKernelSessionStatusRequest, ApiGetKernelSessionStatusResponse, ApiListKernelFilesRequest, ApiListKernelFilesResponse, ApiListKernelSessionOutputRequest, ApiListKernelSessionOutputResponse, ApiListKernelsRequest, 
ApiListKernelsResponse, ApiSaveKernelRequest, ApiSaveKernelResponse +from kagglesdk.kernels.types.kernels_api_service import ApiDownloadKernelOutputRequest, ApiGetKernelRequest, ApiGetKernelResponse, ApiGetKernelSessionStatusRequest, ApiGetKernelSessionStatusResponse, ApiListKernelFilesRequest, ApiListKernelFilesResponse, ApiListKernelSessionOutputRequest, ApiListKernelSessionOutputResponse, ApiListKernelsRequest, ApiListKernelsResponse, ApiSaveKernelRequest, ApiSaveKernelResponse class KernelsApiClient(object): @@ -77,3 +78,17 @@ def get_kernel_session_status(self, request: ApiGetKernelSessionStatusRequest = request = ApiGetKernelSessionStatusRequest() return self._client.call("kernels.KernelsApiService", "ApiGetKernelSessionStatus", request, ApiGetKernelSessionStatusResponse) + + def download_kernel_output(self, request: ApiDownloadKernelOutputRequest = None) -> HttpRedirect: + r""" + Meant for use by Kaggle Hub (http bindings and terminology align with that) + + Args: + request (ApiDownloadKernelOutputRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = ApiDownloadKernelOutputRequest() + + return self._client.call("kernels.KernelsApiService", "ApiDownloadKernelOutput", request, HttpRedirect) diff --git a/kagglesdk/kernels/types/kernels_api_service.py b/kagglesdk/kernels/types/kernels_api_service.py index 224a17e..2cf9a12 100644 --- a/kagglesdk/kernels/types/kernels_api_service.py +++ b/kagglesdk/kernels/types/kernels_api_service.py @@ -3,6 +3,88 @@ from kagglesdk.kernels.types.kernels_enums import KernelsListSortType, KernelsListViewType, KernelWorkerStatus from typing import Optional, List +class ApiDownloadKernelOutputRequest(KaggleObject): + r""" + Attributes: + owner_slug (str) + kernel_slug (str) + file_path (str) + Relative path to a specific file inside the databundle. 
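
A short illustrative sketch of calling the new kernel output download endpoint (the helper name is hypothetical; an authenticated KaggleHttpClient is assumed, and the HttpRedirect returned by the service is passed back to the caller):

from kagglesdk.kernels.services.kernels_api_service import KernelsApiClient
from kagglesdk.kernels.types.kernels_api_service import ApiDownloadKernelOutputRequest

def download_kernel_output(http_client, owner, kernel, file_path=None, version=None):
    # Request a redirect to the whole output bundle, or to a single file when file_path is set.
    request = ApiDownloadKernelOutputRequest()
    request.owner_slug = owner
    request.kernel_slug = kernel
    if file_path is not None:
        request.file_path = file_path
    if version is not None:
        request.version_number = version
    return KernelsApiClient(http_client).download_kernel_output(request)
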
+ version_number (int) + """ + + def __init__(self): + self._owner_slug = "" + self._kernel_slug = "" + self._file_path = None + self._version_number = None + self._freeze() + + @property + def owner_slug(self) -> str: + return self._owner_slug + + @owner_slug.setter + def owner_slug(self, owner_slug: str): + if owner_slug is None: + del self.owner_slug + return + if not isinstance(owner_slug, str): + raise TypeError('owner_slug must be of type str') + self._owner_slug = owner_slug + + @property + def kernel_slug(self) -> str: + return self._kernel_slug + + @kernel_slug.setter + def kernel_slug(self, kernel_slug: str): + if kernel_slug is None: + del self.kernel_slug + return + if not isinstance(kernel_slug, str): + raise TypeError('kernel_slug must be of type str') + self._kernel_slug = kernel_slug + + @property + def file_path(self) -> str: + """Relative path to a specific file inside the databundle.""" + return self._file_path or "" + + @file_path.setter + def file_path(self, file_path: str): + if file_path is None: + del self.file_path + return + if not isinstance(file_path, str): + raise TypeError('file_path must be of type str') + self._file_path = file_path + + @property + def version_number(self) -> int: + return self._version_number or 0 + + @version_number.setter + def version_number(self, version_number: int): + if version_number is None: + del self.version_number + return + if not isinstance(version_number, int): + raise TypeError('version_number must be of type int') + self._version_number = version_number + + + def endpoint(self): + if self.file_path: + path = '/api/v1/kernels/output/download/{owner_slug}/{kernel_slug}/{file_path}' + else: + path = '/api/v1/kernels/output/download/{owner_slug}/{kernel_slug}' + return path.format_map(self.to_field_map(self)) + + @staticmethod + def endpoint_path(): + return '/api/v1/kernels/output/download/{owner_slug}/{kernel_slug}' + class ApiGetKernelRequest(KaggleObject): r""" Attributes: @@ -257,6 +339,7 @@ class ApiKernelMetadata(KaggleObject): competition_data_sources (str) model_data_sources (str) total_votes (int) + current_version_number (int) """ def __init__(self): @@ -278,6 +361,7 @@ def __init__(self): self._competition_data_sources = [] self._model_data_sources = [] self._total_votes = 0 + self._current_version_number = None self._freeze() @property @@ -524,6 +608,19 @@ def total_votes(self, total_votes: int): raise TypeError('total_votes must be of type int') self._total_votes = total_votes + @property + def current_version_number(self) -> int: + return self._current_version_number or 0 + + @current_version_number.setter + def current_version_number(self, current_version_number: int): + if current_version_number is None: + del self.current_version_number + return + if not isinstance(current_version_number, int): + raise TypeError('current_version_number must be of type int') + self._current_version_number = current_version_number + class ApiListKernelFilesRequest(KaggleObject): r""" @@ -1616,6 +1713,13 @@ def creation_date(self, creation_date: str): self._creation_date = creation_date +ApiDownloadKernelOutputRequest._fields = [ + FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), + FieldMetadata("kernelSlug", "kernel_slug", "_kernel_slug", str, "", PredefinedSerializer()), + FieldMetadata("filePath", "file_path", "_file_path", str, None, PredefinedSerializer(), optional=True), + FieldMetadata("versionNumber", "version_number", "_version_number", int, None, PredefinedSerializer(), 
optional=True), +] + ApiGetKernelRequest._fields = [ FieldMetadata("userName", "user_name", "_user_name", str, "", PredefinedSerializer()), FieldMetadata("kernelSlug", "kernel_slug", "_kernel_slug", str, "", PredefinedSerializer()), @@ -1662,6 +1766,7 @@ def creation_date(self, creation_date: str): FieldMetadata("competitionDataSources", "competition_data_sources", "_competition_data_sources", str, [], ListSerializer(PredefinedSerializer())), FieldMetadata("modelDataSources", "model_data_sources", "_model_data_sources", str, [], ListSerializer(PredefinedSerializer())), FieldMetadata("totalVotes", "total_votes", "_total_votes", int, 0, PredefinedSerializer()), + FieldMetadata("currentVersionNumber", "current_version_number", "_current_version_number", int, None, PredefinedSerializer(), optional=True), ] ApiListKernelFilesRequest._fields = [ diff --git a/kagglesdk/models/services/model_api_service.py b/kagglesdk/models/services/model_api_service.py index 7e46c6b..1b8d637 100644 --- a/kagglesdk/models/services/model_api_service.py +++ b/kagglesdk/models/services/model_api_service.py @@ -1,6 +1,6 @@ from kagglesdk.common.types.http_redirect import HttpRedirect from kagglesdk.kaggle_http_client import KaggleHttpClient -from kagglesdk.models.types.model_api_service import ApiCreateModelInstanceRequest, ApiCreateModelInstanceVersionRequest, ApiCreateModelRequest, ApiCreateModelResponse, ApiDeleteModelInstanceRequest, ApiDeleteModelInstanceVersionRequest, ApiDeleteModelRequest, ApiDeleteModelResponse, ApiDownloadModelInstanceVersionRequest, ApiGetModelInstanceRequest, ApiGetModelRequest, ApiListModelInstanceVersionFilesRequest, ApiListModelInstanceVersionFilesResponse, ApiListModelsRequest, ApiListModelsResponse, ApiModel, ApiModelInstance, ApiUpdateModelInstanceRequest, ApiUpdateModelRequest, ApiUpdateModelResponse, ApiUploadModelFileRequest, ApiUploadModelFileResponse +from kagglesdk.models.types.model_api_service import ApiCreateModelInstanceRequest, ApiCreateModelInstanceVersionRequest, ApiCreateModelRequest, ApiCreateModelResponse, ApiDeleteModelInstanceRequest, ApiDeleteModelInstanceVersionRequest, ApiDeleteModelRequest, ApiDeleteModelResponse, ApiDownloadModelInstanceVersionRequest, ApiGetModelInstanceRequest, ApiGetModelRequest, ApiListModelInstanceVersionFilesRequest, ApiListModelInstanceVersionFilesResponse, ApiListModelsRequest, ApiListModelsResponse, ApiModel, ApiModelInstance, ApiUpdateModelInstanceRequest, ApiUpdateModelRequest, ApiUpdateModelResponse, ApiUploadModelFileRequest, ApiUploadModelFileResponse, CreateModelSigningTokenRequest, CreateModelSigningTokenResponse, KeysRequest, KeysResponse, WellKnowEndpointRequest, WellKnowEndpointResponse class ModelApiClient(object): @@ -176,3 +176,50 @@ def upload_model_file(self, request: ApiUploadModelFileRequest = None) -> ApiUpl request = ApiUploadModelFileRequest() return self._client.call("models.ModelApiService", "ApiUploadModelFile", request, ApiUploadModelFileResponse) + + def create_model_signing_token(self, request: CreateModelSigningTokenRequest = None) -> CreateModelSigningTokenResponse: + r""" + Creates a Kaggle-issued identity token. The token is signed using a + private key held in KMS that is only accessible by the Kaggle model-signer + service account. + + Args: + request (CreateModelSigningTokenRequest): + The request object; initialized to empty instance if not specified.
+ """ + + if request is None: + request = CreateModelSigningTokenRequest() + + return self._client.call("models.ModelApiService", "CreateModelSigningToken", request, CreateModelSigningTokenResponse) + + def well_know_endpoint(self, request: WellKnowEndpointRequest = None) -> WellKnowEndpointResponse: + r""" + see spec - + https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfig. + Must support CORS. The service will have a path component. + + Args: + request (WellKnowEndpointRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = WellKnowEndpointRequest() + + return self._client.call("models.ModelApiService", "WellKnowEndpoint", request, WellKnowEndpointResponse) + + def keys(self, request: KeysRequest = None) -> KeysResponse: + r""" + The JWKS endpoint containing the keys to validate the signature of a kaggle + issued signing identity token. + + Args: + request (KeysRequest): + The request object; initialized to empty instance if not specified. + """ + + if request is None: + request = KeysRequest() + + return self._client.call("models.ModelApiService", "Keys", request, KeysResponse) diff --git a/kagglesdk/models/types/model_api_service.py b/kagglesdk/models/types/model_api_service.py index ce2776a..ade58ce 100644 --- a/kagglesdk/models/types/model_api_service.py +++ b/kagglesdk/models/types/model_api_service.py @@ -2537,6 +2537,364 @@ def create_url(self, create_url: str): self._create_url = create_url +class CreateModelSigningTokenRequest(KaggleObject): + r""" + Attributes: + owner_slug (str) + model_slug (str) + """ + + def __init__(self): + self._owner_slug = "" + self._model_slug = "" + self._freeze() + + @property + def owner_slug(self) -> str: + return self._owner_slug + + @owner_slug.setter + def owner_slug(self, owner_slug: str): + if owner_slug is None: + del self.owner_slug + return + if not isinstance(owner_slug, str): + raise TypeError('owner_slug must be of type str') + self._owner_slug = owner_slug + + @property + def model_slug(self) -> str: + return self._model_slug + + @model_slug.setter + def model_slug(self, model_slug: str): + if model_slug is None: + del self.model_slug + return + if not isinstance(model_slug, str): + raise TypeError('model_slug must be of type str') + self._model_slug = model_slug + + + def endpoint(self): + path = '/api/v1/models/signing/token' + return path.format_map(self.to_field_map(self)) + + + @staticmethod + def method(): + return 'POST' + + @staticmethod + def body_fields(): + return '*' + +class CreateModelSigningTokenResponse(KaggleObject): + r""" + Attributes: + id_token (str) + """ + + def __init__(self): + self._id_token = "" + self._freeze() + + @property + def id_token(self) -> str: + return self._id_token + + @id_token.setter + def id_token(self, id_token: str): + if id_token is None: + del self.id_token + return + if not isinstance(id_token, str): + raise TypeError('id_token must be of type str') + self._id_token = id_token + + +class KeysRequest(KaggleObject): + r""" + """ + + pass + + def endpoint(self): + path = '/api/v1/models/signing/keys' + return path.format_map(self.to_field_map(self)) + +class KeysResponse(KaggleObject): + r""" + JWKS specification can be found: + https://openid.net/specs/draft-jones-json-web-key-03.html// + https://datatracker.ietf.org/doc/html/rfc7517 + + Attributes: + keys (JWK) + """ + + def __init__(self): + self._keys = [] + self._freeze() + + @property + def keys(self) -> Optional[List[Optional['JWK']]]: + return 
self._keys + + @keys.setter + def keys(self, keys: Optional[List[Optional['JWK']]]): + if keys is None: + del self.keys + return + if not isinstance(keys, list): + raise TypeError('keys must be of type list') + if not all([isinstance(t, JWK) for t in keys]): + raise TypeError('keys must contain only items of type JWK') + self._keys = keys + + +class WellKnowEndpointRequest(KaggleObject): + r""" + """ + + pass + + def endpoint(self): + path = '/api/v1/models/signing/.well-known/openid-configuration' + return path.format_map(self.to_field_map(self)) + +class WellKnowEndpointResponse(KaggleObject): + r""" + More details about the response can be found at + https://openid.net/specs/openid-connect-discovery-1_0.html#MetadataContents + + Attributes: + issuer (str) + jwks_uri (str) + The keys endpoint from rpc `ModelApiService.Keys` + token_endpoint (str) + The token endpoint from rpc `ModelApiService.CreateModelSigningToken` + id_token_signing_alg_values_supported (str) + claims_supported (str) + response_types_supported (str) + subject_types_supported (str) + """ + + def __init__(self): + self._issuer = "" + self._jwks_uri = "" + self._token_endpoint = "" + self._id_token_signing_alg_values_supported = [] + self._claims_supported = [] + self._response_types_supported = [] + self._subject_types_supported = [] + self._freeze() + + @property + def issuer(self) -> str: + return self._issuer + + @issuer.setter + def issuer(self, issuer: str): + if issuer is None: + del self.issuer + return + if not isinstance(issuer, str): + raise TypeError('issuer must be of type str') + self._issuer = issuer + + @property + def jwks_uri(self) -> str: + """The keys endpoint from rpc `ModelApiService.Keys`""" + return self._jwks_uri + + @jwks_uri.setter + def jwks_uri(self, jwks_uri: str): + if jwks_uri is None: + del self.jwks_uri + return + if not isinstance(jwks_uri, str): + raise TypeError('jwks_uri must be of type str') + self._jwks_uri = jwks_uri + + @property + def token_endpoint(self) -> str: + """The token endpoint from rpc `ModelApiService.CreateModelSigningToken`""" + return self._token_endpoint + + @token_endpoint.setter + def token_endpoint(self, token_endpoint: str): + if token_endpoint is None: + del self.token_endpoint + return + if not isinstance(token_endpoint, str): + raise TypeError('token_endpoint must be of type str') + self._token_endpoint = token_endpoint + + @property + def id_token_signing_alg_values_supported(self) -> Optional[List[str]]: + return self._id_token_signing_alg_values_supported + + @id_token_signing_alg_values_supported.setter + def id_token_signing_alg_values_supported(self, id_token_signing_alg_values_supported: Optional[List[str]]): + if id_token_signing_alg_values_supported is None: + del self.id_token_signing_alg_values_supported + return + if not isinstance(id_token_signing_alg_values_supported, list): + raise TypeError('id_token_signing_alg_values_supported must be of type list') + if not all([isinstance(t, str) for t in id_token_signing_alg_values_supported]): + raise TypeError('id_token_signing_alg_values_supported must contain only items of type str') + self._id_token_signing_alg_values_supported = id_token_signing_alg_values_supported + + @property + def claims_supported(self) -> Optional[List[str]]: + return self._claims_supported + + @claims_supported.setter + def claims_supported(self, claims_supported: Optional[List[str]]): + if claims_supported is None: + del self.claims_supported + return + if not isinstance(claims_supported, list): + raise 
TypeError('claims_supported must be of type list') + if not all([isinstance(t, str) for t in claims_supported]): + raise TypeError('claims_supported must contain only items of type str') + self._claims_supported = claims_supported + + @property + def response_types_supported(self) -> Optional[List[str]]: + return self._response_types_supported + + @response_types_supported.setter + def response_types_supported(self, response_types_supported: Optional[List[str]]): + if response_types_supported is None: + del self.response_types_supported + return + if not isinstance(response_types_supported, list): + raise TypeError('response_types_supported must be of type list') + if not all([isinstance(t, str) for t in response_types_supported]): + raise TypeError('response_types_supported must contain only items of type str') + self._response_types_supported = response_types_supported + + @property + def subject_types_supported(self) -> Optional[List[str]]: + return self._subject_types_supported + + @subject_types_supported.setter + def subject_types_supported(self, subject_types_supported: Optional[List[str]]): + if subject_types_supported is None: + del self.subject_types_supported + return + if not isinstance(subject_types_supported, list): + raise TypeError('subject_types_supported must be of type list') + if not all([isinstance(t, str) for t in subject_types_supported]): + raise TypeError('subject_types_supported must contain only items of type str') + self._subject_types_supported = subject_types_supported + + +class JWK(KaggleObject): + r""" + Attributes: + kty (str) + alg (str) + use (str) + kid (str) + n (str) + modulus + e (str) + public exponent + """ + + def __init__(self): + self._kty = "" + self._alg = "" + self._use = "" + self._kid = "" + self._n = "" + self._e = "" + self._freeze() + + @property + def kty(self) -> str: + return self._kty + + @kty.setter + def kty(self, kty: str): + if kty is None: + del self.kty + return + if not isinstance(kty, str): + raise TypeError('kty must be of type str') + self._kty = kty + + @property + def alg(self) -> str: + return self._alg + + @alg.setter + def alg(self, alg: str): + if alg is None: + del self.alg + return + if not isinstance(alg, str): + raise TypeError('alg must be of type str') + self._alg = alg + + @property + def use(self) -> str: + return self._use + + @use.setter + def use(self, use: str): + if use is None: + del self.use + return + if not isinstance(use, str): + raise TypeError('use must be of type str') + self._use = use + + @property + def kid(self) -> str: + return self._kid + + @kid.setter + def kid(self, kid: str): + if kid is None: + del self.kid + return + if not isinstance(kid, str): + raise TypeError('kid must be of type str') + self._kid = kid + + @property + def n(self) -> str: + """modulus""" + return self._n + + @n.setter + def n(self, n: str): + if n is None: + del self.n + return + if not isinstance(n, str): + raise TypeError('n must be of type str') + self._n = n + + @property + def e(self) -> str: + """public exponent""" + return self._e + + @e.setter + def e(self, e: str): + if e is None: + del self.e + return + if not isinstance(e, str): + raise TypeError('e must be of type str') + self._e = e + + ApiCreateModelInstanceRequest._fields = [ FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), FieldMetadata("modelSlug", "model_slug", "_model_slug", str, "", PredefinedSerializer()), @@ -2753,3 +3111,39 @@ def create_url(self, create_url: str): FieldMetadata("createUrl", 
"create_url", "_create_url", str, "", PredefinedSerializer()), ] +CreateModelSigningTokenRequest._fields = [ + FieldMetadata("ownerSlug", "owner_slug", "_owner_slug", str, "", PredefinedSerializer()), + FieldMetadata("modelSlug", "model_slug", "_model_slug", str, "", PredefinedSerializer()), +] + +CreateModelSigningTokenResponse._fields = [ + FieldMetadata("id_token", "id_token", "_id_token", str, "", PredefinedSerializer()), +] + +KeysRequest._fields = [] + +KeysResponse._fields = [ + FieldMetadata("keys", "keys", "_keys", JWK, [], ListSerializer(KaggleObjectSerializer())), +] + +WellKnowEndpointRequest._fields = [] + +WellKnowEndpointResponse._fields = [ + FieldMetadata("issuer", "issuer", "_issuer", str, "", PredefinedSerializer()), + FieldMetadata("jwks_uri", "jwks_uri", "_jwks_uri", str, "", PredefinedSerializer()), + FieldMetadata("token_endpoint", "token_endpoint", "_token_endpoint", str, "", PredefinedSerializer()), + FieldMetadata("id_token_signing_alg_values_supported", "id_token_signing_alg_values_supported", "_id_token_signing_alg_values_supported", str, [], ListSerializer(PredefinedSerializer())), + FieldMetadata("claims_supported", "claims_supported", "_claims_supported", str, [], ListSerializer(PredefinedSerializer())), + FieldMetadata("response_types_supported", "response_types_supported", "_response_types_supported", str, [], ListSerializer(PredefinedSerializer())), + FieldMetadata("subject_types_supported", "subject_types_supported", "_subject_types_supported", str, [], ListSerializer(PredefinedSerializer())), +] + +JWK._fields = [ + FieldMetadata("kty", "kty", "_kty", str, "", PredefinedSerializer()), + FieldMetadata("alg", "alg", "_alg", str, "", PredefinedSerializer()), + FieldMetadata("use", "use", "_use", str, "", PredefinedSerializer()), + FieldMetadata("kid", "kid", "_kid", str, "", PredefinedSerializer()), + FieldMetadata("n", "n", "_n", str, "", PredefinedSerializer()), + FieldMetadata("e", "e", "_e", str, "", PredefinedSerializer()), +] + diff --git a/kagglesdk/users/types/users_enums.py b/kagglesdk/users/types/users_enums.py index 9b91246..1de7346 100644 --- a/kagglesdk/users/types/users_enums.py +++ b/kagglesdk/users/types/users_enums.py @@ -1,12 +1,5 @@ import enum -class CollaboratorType(enum.Enum): - COLLABORATOR_TYPE_UNSPECIFIED = 0 - READER = 1 - WRITER = 2 - OWNER = 3 - ADMIN = 4 - class UserAchievementTier(enum.Enum): NOVICE = 0 CONTRIBUTOR = 1 @@ -20,3 +13,10 @@ class UserAchievementTier(enum.Enum): RECALC = 21 """Flag user for tier recalculation""" +class CollaboratorType(enum.Enum): + COLLABORATOR_TYPE_UNSPECIFIED = 0 + READER = 1 + WRITER = 2 + OWNER = 3 + ADMIN = 4 + diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..702eca7 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,5 @@ +certifi >= 14.05.14 +six >= 1.10 +python_dateutil >= 2.5.3 +setuptools >= 21.0.0 +urllib3 >= 1.15.1 diff --git a/src/KaggleSwagger.yaml b/src/KaggleSwagger.yaml deleted file mode 100644 index 1931b27..0000000 --- a/src/KaggleSwagger.yaml +++ /dev/null @@ -1,2301 +0,0 @@ -# Copyright 2018 Kaggle Inc -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -swagger: "2.0" -info: - description: API for kaggle.com - version: "1" - title: Kaggle API - termsOfService: https://www.kaggle.com/terms - contact: - name: Contact Kaggle - url: https://www.kaggle.com/contact -host: www.kaggle.com -basePath: /api/v1 -schemes: - - https -securityDefinitions: - basicAuth: - type: basic -security: - - basicAuth: [] -externalDocs: - description: Further Kaggle documentation - url: www.kaggle.com -tags: - - name: kaggle -parameters: - guidParam: - in: path - name: guid - required: true - type: string - minimum: 0 - description: guid specifies location where submission should be uploaded -paths: - /competitions/list: - get: - tags: - - kaggle - summary: List competitions - operationId: CompetitionsList - produces: - - application/json - parameters: - - in: query - name: group - type: string - enum: [general, entered, inClass] - default: general - description: Filter competitions by a particular group - - in: query - name: category - type: string - enum: - [ - all, - featured, - research, - recruitment, - gettingStarted, - masters, - playground, - ] - default: all - description: Filter competitions by a particular category - - in: query - name: sortBy - type: string - enum: - [ - grouped, - prize, - earliestDeadline, - latestDeadline, - numberOfTeams, - recentlyCreated, - ] - default: latestDeadline - description: Sort the results - - in: query - name: page - type: integer - default: 1 - description: Page number - - in: query - name: search - type: string - default: "" - description: Search terms - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /competitions/submissions/list/{id}: - get: - tags: - - kaggle - summary: List competition submissions - operationId: CompetitionsSubmissionsList - produces: - - application/json - parameters: - - in: path - name: id - required: true - type: string - description: Competition name - - in: query - name: page - required: false - type: integer - default: 1 - description: Page number - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /competitions/{id}/submissions/url/{contentLength}/{lastModifiedDateUtc}: - post: - tags: - - kaggle - summary: Generate competition submission URL - operationId: CompetitionsSubmissionsUrl - produces: - - application/json - consumes: - - multipart/form-data - parameters: - - in: formData - name: fileName - required: false - type: string - description: Competition submission file name - - in: path - name: id - required: true - type: string - description: Competition name, as it appears in the competition's URL - - in: path - name: contentLength - required: true - type: integer - description: Content length of file in bytes - - in: path - name: lastModifiedDateUtc - required: true - type: integer - description: Last modified date of file in seconds since epoch in UTC - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /competitions/submissions/upload/{guid}/{contentLength}/{lastModifiedDateUtc}: - post: - tags: - - kaggle - summary: Upload competition submission file - operationId: CompetitionsSubmissionsUpload - produces: - - application/json - consumes: - - multipart/form-data - 
parameters: - - in: formData - name: file - required: true - type: file - description: Competition submission file - - in: path - name: guid - required: true - type: string - description: Location where submission should be uploaded - - in: path - name: contentLength - required: true - type: integer - description: Content length of file in bytes - - in: path - name: lastModifiedDateUtc - required: true - type: integer - description: Last modified date of file in seconds since epoch in UTC - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /competitions/submissions/submit/{id}: - post: - tags: - - kaggle - summary: Submit to competition - operationId: CompetitionsSubmissionsSubmit - produces: - - application/json - consumes: - - multipart/form-data - parameters: - - in: formData - name: blobFileTokens - required: true - type: string - description: Token identifying location of uploaded submission file - - in: formData - name: submissionDescription - required: true - type: string - description: Description of competition submission - - in: path - name: id - required: true - type: string - description: Competition name - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /competitions/data/list/{id}: - get: - tags: - - kaggle - summary: List competition data files - operationId: CompetitionsDataListFiles - produces: - - application/json - parameters: - - in: path - name: id - required: true - type: string - description: Competition name - - in: query - name: pageToken - required: false - type: string - description: Page token for pagination - - in: query - name: pageSize - required: false - type: integer - default: 20 - description: Number of items per page (default 20) - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /competitions/data/download/{id}/{fileName}: - get: - tags: - - kaggle - summary: Download competition data file - operationId: CompetitionsDataDownloadFile - parameters: - - in: path - name: id - required: true - type: string - description: Competition name - - in: path - name: fileName - required: true - type: string - description: Competition name - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /competitions/data/download-all/{id}: - get: - tags: - - kaggle - summary: Download all competition data files - operationId: CompetitionsDataDownloadFiles - parameters: - - in: path - name: id - required: true - type: string - description: Competition name - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /competitions/{id}/leaderboard/download: - get: - tags: - - kaggle - summary: Download competition leaderboard - operationId: CompetitionDownloadLeaderboard - parameters: - - in: path - name: id - required: true - type: string - description: Competition name - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /competitions/{id}/leaderboard/view: - get: - tags: - - kaggle - summary: VIew competition leaderboard - operationId: CompetitionViewLeaderboard - 
parameters: - - in: path - name: id - required: true - type: string - description: Competition name - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /datasets/list: - get: - tags: - - kaggle - summary: List datasets - operationId: DatasetsList - produces: - - application/json - parameters: - - in: query - name: group - type: string - enum: [public, my, myPrivate, upvoted, user] - default: public - description: Display datasets by a particular group - - in: query - name: sortBy - type: string - enum: [hottest, votes, updated, active] - default: hottest - description: Sort the results - - in: query - name: size - type: string - enum: [] - default: all - description: (DEPRECATED). Please use --max-size and --min-size to filter dataset sizes. - - in: query - name: filetype - type: string - enum: [all, csv, sqlite, json, bigQuery] - default: all - description: Display datasets of a specific file type - - in: query - name: license - type: string - enum: [all, cc, gpl, odb, other] - default: all - description: Display datasets with a specific license - - in: query - name: tagids - type: string - description: A comma separated list of tags to filter by - - in: query - name: search - default: "" - type: string - description: Search terms - - in: query - name: user - type: string - description: Display datasets by a specific user or organization - - in: query - name: page - type: integer - default: 1 - description: Page number - - in: query - name: maxSize - type: integer - format: int64 - description: Max Dataset Size (bytes) - - in: query - name: minSize - type: integer - format: int64 - description: Max Dataset Size (bytes) - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /datasets/list/{ownerSlug}/{datasetSlug}: - get: - tags: - - kaggle - summary: List dataset files - operationId: DatasetsListFiles - produces: - - application/json - parameters: - - in: path - name: ownerSlug - required: true - type: string - description: Dataset owner - - in: path - name: datasetSlug - required: true - type: string - description: Dataset name - - in: query - name: datasetVersionNumber - required: false - type: string - description: Dataset version number - - in: query - name: pageToken - required: false - type: string - description: Page token for pagination - - in: query - name: pageSize - required: false - default: 20 - type: integer - description: Number of items per page (default 20) - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /datasets/status/{ownerSlug}/{datasetSlug}: - get: - tags: - - kaggle - summary: Get dataset creation status - operationId: DatasetsStatus - produces: - - application/json - parameters: - - in: path - name: ownerSlug - required: true - type: string - description: Dataset owner - - in: path - name: datasetSlug - required: true - type: string - description: Dataset name - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /datasets/download/{ownerSlug}/{datasetSlug}: - get: - tags: - - kaggle - summary: Download dataset file - operationId: DatasetsDownload - produces: - - file - parameters: - - in: path - name: ownerSlug - required: true - type: string - 
description: Dataset owner - - in: path - name: datasetSlug - required: true - type: string - description: Dataset name - - in: query - name: datasetVersionNumber - required: false - type: string - description: Dataset version number - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /datasets/download/{ownerSlug}/{datasetSlug}/{fileName}: - get: - tags: - - kaggle - summary: Download dataset file - operationId: DatasetsDownloadFile - produces: - - file - parameters: - - in: path - name: ownerSlug - required: true - type: string - description: Dataset owner - - in: path - name: datasetSlug - required: true - type: string - description: Dataset name - - in: path - name: fileName - required: true - type: string - description: File name - - in: query - name: datasetVersionNumber - required: false - type: string - description: Dataset version number - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /datasets/create/version/{id}: - post: - tags: - - kaggle - summary: Create a new dataset version by id - operationId: DatasetsCreateVersionById - produces: - - application/json - consumes: - - application/json - parameters: - - in: path - name: id - required: true - type: integer - description: Dataset ID - - in: body - name: datasetNewVersionRequest - required: true - schema: - $ref: "#/definitions/DatasetNewVersionRequest" - description: Information for creating a new dataset version - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /datasets/create/version/{ownerSlug}/{datasetSlug}: - post: - tags: - - kaggle - summary: Create a new dataset version - operationId: DatasetsCreateVersion - produces: - - application/json - consumes: - - application/json - parameters: - - in: path - name: ownerSlug - required: true - type: string - description: Dataset owner - - in: path - name: datasetSlug - required: true - type: string - description: Dataset name - - in: body - name: datasetNewVersionRequest - required: true - schema: - $ref: "#/definitions/DatasetNewVersionRequest" - description: Information for creating a new dataset version - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /datasets/create/new: - post: - tags: - - kaggle - summary: Create a new dataset - operationId: DatasetsCreateNew - produces: - - application/json - consumes: - - application/json - parameters: - - in: body - name: datasetNewRequest - required: true - schema: - $ref: "#/definitions/DatasetNewRequest" - description: Information for creating a new dataset - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /kernels/list: - get: - tags: - - kaggle - summary: List kernels - operationId: KernelsList - produces: - - application/json - parameters: - - in: query - name: page - type: integer - default: 1 - description: Page number - - in: query - name: pageSize - type: integer - default: 20 - description: Number of items per page (default 20) - - in: query - name: search - default: "" - type: string - description: Search terms - - in: query - name: group - type: string - enum: [everyone, profile, upvoted] - 
default: everyone - description: Display only your kernels - - in: query - name: user - type: string - description: Display kernels by a particular group - - in: query - name: language - type: string - enum: [all, python, r, sqlite, julia] - default: all - description: Display kernels in a specific language - - in: query - name: kernelType - type: string - enum: [all, script, notebook] - default: all - description: Display kernels of a specific type - - in: query - name: outputType - type: string - enum: [all, visualization, data] - default: all - description: Display kernels with a specific output type - - in: query - name: sortBy - type: string - enum: - [ - hotness, - commentCount, - dateCreated, - dateRun, - scoreAscending, - scoreDescending, - viewCount, - voteCount, - relevance, - ] - default: hotness - description: Sort the results. 'relevance' only works if there is a search query - - in: query - name: dataset - type: string - description: Display kernels using the specified dataset - - in: query - name: competition - type: string - description: Display kernels using the specified competition - - in: query - name: parentKernel - type: string - description: Display kernels that have forked the specified kernel - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /kernels/files: - get: - tags: - - kaggle - summary: List kernel files - operationId: KernelsListFiles - produces: - - application/json - parameters: - - in: query - name: userName - required: true - type: string - description: Kernel owner - - in: query - name: kernelSlug - required: true - type: string - description: Kernel name - - in: query - name: kernelVersionNumber - required: false - type: string - description: Kernel version number - - in: query - name: pageSize - type: integer - default: 20 - description: Number of items per page (default 20) - - in: query - name: pageToken - type: string - description: Page token for pagination - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /kernels/push: - post: - tags: - - kaggle - summary: Push a new kernel version. Can be used to create a new kernel and update an existing one. 
- operationId: KernelPush - produces: - - application/json - consumes: - - application/json - parameters: - - in: body - name: kernelPushRequest - required: true - schema: - $ref: "#/definitions/KernelPushRequest" - description: Information for pushing a new kernel version - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /kernels/pull: - get: - tags: - - kaggle - summary: Pull the latest code from a kernel - operationId: KernelPull - produces: - - application/json - parameters: - - in: query - name: userName - required: true - type: string - description: Kernel owner - - in: query - name: kernelSlug - required: true - type: string - description: Kernel name - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /kernels/output: - get: - tags: - - kaggle - summary: Download the latest output from a kernel - operationId: KernelOutput - produces: - - application/json - parameters: - - in: query - name: userName - required: true - type: string - description: Kernel owner - - in: query - name: kernelSlug - required: true - type: string - description: Kernel name - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /kernels/status: - get: - tags: - - kaggle - summary: Get the status of the latest kernel version - operationId: KernelStatus - produces: - - application/json - parameters: - - in: query - name: userName - required: true - type: string - description: Kernel owner - - in: query - name: kernelSlug - required: true - type: string - description: Kernel name - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /datasets/metadata/{ownerSlug}/{datasetSlug}: - get: - tags: - - kaggle - summary: Get the metadata for a dataset - operationId: MetadataGet - produces: - - application/json - parameters: - - in: path - name: ownerSlug - required: true - type: string - description: Dataset owner - - in: path - name: datasetSlug - required: true - type: string - description: Dataset name - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - post: - tags: - - kaggle - summary: Update the metadata for a dataset - operationId: MetadataPost - consumes: - - application/json - produces: - - application/json - parameters: - - in: path - name: ownerSlug - required: true - type: string - description: Dataset owner - - in: path - name: datasetSlug - required: true - type: string - description: Dataset name - - in: body - name: settings - required: true - description: Dataset metadata to update - type: object - schema: - $ref: "#/definitions/DatasetUpdateSettingsRequest" - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /models/{ownerSlug}/{modelSlug}/get: - get: - tags: - - kaggle - summary: Get a model - operationId: GetModel - produces: - - application/json - parameters: - - in: path - name: ownerSlug - required: true - type: string - description: Model owner - - in: path - name: modelSlug - required: true - type: string - description: Model name - responses: - 200: - description: Result - 
schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /models/list: - get: - tags: - - kaggle - summary: Lists models - operationId: ModelsList - produces: - - application/json - parameters: - - in: query - name: search - default: "" - type: string - description: Search terms - - in: query - name: sortBy - type: string - enum: - [ - hotness, - downloadCount, - voteCount, - notebookCount, - publishTime, - createTime, - updateTime, - ] - default: hotness - description: Sort the results - - in: query - name: owner - type: string - description: Display models by a specific user or organization - - in: query - name: pageSize - type: integer - default: 20 - description: Number of items per page (default 20) - - in: query - name: pageToken - type: string - description: Page token for pagination - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/files: - get: - tags: - - kaggle - summary: List model instance files for the current version - operationId: ModelInstanceFiles - produces: - - application/json - parameters: - - in: path - name: ownerSlug - required: true - type: string - description: Model owner - - in: path - name: modelSlug - required: true - type: string - description: Model name - - in: path - name: framework - required: true - type: string - description: Model instance framework - - in: path - name: instanceSlug - required: true - type: string - description: Model instance slug - - in: query - name: pageSize - type: integer - default: 20 - description: Number of items per page (default 20) - - in: query - name: pageToken - type: string - description: Page token for pagination - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /models/create/new: - post: - tags: - - kaggle - summary: Create a new model - operationId: ModelsCreateNew - produces: - - application/json - consumes: - - application/json - parameters: - - in: body - name: modelNewRequest - required: true - schema: - $ref: "#/definitions/modelNewRequest" - description: Information for creating a new model - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/get: - get: - tags: - - kaggle - summary: Get a model instance - operationId: GetModelInstance - produces: - - application/json - consumes: - - application/json - parameters: - - in: path - name: ownerSlug - required: true - type: string - description: Model owner - - in: path - name: modelSlug - required: true - type: string - description: Model name - - in: path - name: framework - required: true - type: string - description: Model instance framework - - in: path - name: instanceSlug - required: true - type: string - description: Model instance slug - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /models/{ownerSlug}/{modelSlug}/create/instance: - post: - tags: - - kaggle - summary: Create a new model instance - operationId: ModelsCreateInstance - produces: - - application/json - consumes: - - application/json - parameters: - - in: path - name: ownerSlug - required: true - type: 
string - description: Model owner - - in: path - name: modelSlug - required: true - type: string - description: Model slug - - in: body - name: ModelNewInstanceRequest - required: true - schema: - $ref: "#/definitions/ModelNewInstanceRequest" - description: Information for creating a new model instance - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/create/version: - post: - tags: - - kaggle - summary: Create a new model instance version - operationId: ModelsCreateInstanceVersion - produces: - - application/json - consumes: - - application/json - parameters: - - in: path - name: ownerSlug - required: true - type: string - description: Model owner - - in: path - name: modelSlug - required: true - type: string - description: Model slug - - in: path - name: framework - required: true - type: string - description: Model instance framework - - in: path - name: instanceSlug - required: true - type: string - description: Model instance slug - - in: body - name: ModelInstanceNewVersionRequest - required: true - schema: - $ref: "#/definitions/ModelInstanceNewVersionRequest" - description: Information for creating a new model instance version - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /models/{ownerSlug}/{modelSlug}/delete: - post: - tags: - - kaggle - summary: Delete a model - operationId: DeleteModel - produces: - - application/json - consumes: - - application/json - parameters: - - in: path - name: ownerSlug - required: true - type: string - description: Model owner - - in: path - name: modelSlug - required: true - type: string - description: Model name - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/delete: - post: - tags: - - kaggle - summary: Delete a model instance - operationId: DeleteModelInstance - produces: - - application/json - consumes: - - application/json - parameters: - - in: path - name: ownerSlug - required: true - type: string - description: Model owner - - in: path - name: modelSlug - required: true - type: string - description: Model name - - in: path - name: framework - required: true - type: string - description: Model instance framework - - in: path - name: instanceSlug - required: true - type: string - description: Model instance slug - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/{versionNumber}/delete: - post: - tags: - - kaggle - summary: Delete a model instance version - operationId: DeleteModelInstanceVersion - produces: - - application/json - consumes: - - application/json - parameters: - - in: path - name: ownerSlug - required: true - type: string - description: Model owner - - in: path - name: modelSlug - required: true - type: string - description: Model name - - in: path - name: framework - required: true - type: string - description: Model instance framework - - in: path - name: instanceSlug - required: true - type: string - description: Model instance slug - - in: path - name: versionNumber - required: true - type: string - description: Model instance 
version number - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /models/{ownerSlug}/{modelSlug}/update: - post: - tags: - - kaggle - summary: Update a model - operationId: UpdateModel - produces: - - application/json - consumes: - - application/json - parameters: - - in: path - name: ownerSlug - required: true - type: string - description: Model owner - - in: path - name: modelSlug - required: true - type: string - description: Model name - - in: body - name: modelUpdateRequest - required: true - schema: - $ref: "#/definitions/modelUpdateRequest" - description: Information for updating a model - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/update: - post: - tags: - - kaggle - summary: Update a model - operationId: UpdateModelInstance - produces: - - application/json - consumes: - - application/json - parameters: - - in: path - name: ownerSlug - required: true - type: string - description: Model owner - - in: path - name: modelSlug - required: true - type: string - description: Model name - - in: path - name: framework - required: true - type: string - description: Model instance framework - - in: path - name: instanceSlug - required: true - type: string - description: Model instance slug - - in: body - name: modelInstanceUpdateRequest - required: true - schema: - $ref: "#/definitions/modelInstanceUpdateRequest" - description: Information for updating a model instance - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/{versionNumber}/download: - get: - tags: - - kaggle - summary: Download model instance version files - operationId: ModelInstanceVersionsDownload - produces: - - file - parameters: - - in: path - name: ownerSlug - required: true - type: string - description: Model owner - - in: path - name: modelSlug - required: true - type: string - description: Model name - - in: path - name: framework - required: true - type: string - description: Model instance framework - - in: path - name: instanceSlug - required: true - type: string - description: Model instance slug - - in: path - name: versionNumber - required: true - type: string - description: Model instance version number - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /models/{ownerSlug}/{modelSlug}/{framework}/{instanceSlug}/{versionNumber}/files: - get: - tags: - - kaggle - summary: List model instance version files - operationId: ModelInstanceVersionFiles - produces: - - application/json - parameters: - - in: path - name: ownerSlug - required: true - type: string - description: Model owner - - in: path - name: modelSlug - required: true - type: string - description: Model name - - in: path - name: framework - required: true - type: string - description: Model instance framework - - in: path - name: instanceSlug - required: true - type: string - description: Model instance slug - - in: path - name: versionNumber - required: true - type: string - description: Model instance version number - - in: query - name: pageSize - type: integer - default: 20 - description: Number of items per page 
(default 20) - - in: query - name: pageToken - type: string - description: Page token for pagination - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /blobs/upload: - post: - tags: - - kaggle - summary: Start uploading a file - operationId: UploadFile - produces: - - application/json - consumes: - - application/json - parameters: - - in: body - name: startBlobUploadRequest - required: true - schema: - $ref: "#/definitions/StartBlobUploadRequest" - responses: - 200: - description: StartBlobUploadResponse - schema: - $ref: "#/definitions/StartBlobUploadResponse" - default: - description: Error - schema: - $ref: "#/definitions/Error" - /inbox/files/create: - post: - tags: - - kaggle - summary: Creates (aka "drops") a new file into the inbox. - operationId: CreateInboxFile - produces: - - application/json - consumes: - - application/json - parameters: - - in: body - name: createInboxFileRequest - required: true - schema: - $ref: "#/definitions/CreateInboxFileRequest" - responses: - 200: - description: Result - schema: - $ref: "#/definitions/Result" - default: - description: Error - schema: - $ref: "#/definitions/Error" -definitions: - Result: - type: object - Error: - type: object - properties: - code: - type: integer - example: 404 - description: The server error code returned - message: - type: string - example: NotFound - description: The error message generated by the server - DatasetColumn: - type: object - properties: - order: - type: number - description: The order that the column comes in, 0-based. (The first column is 0, second is 1, etc.) - name: - type: string - description: The column name - type: - type: string - description: The type of all of the fields in the column. Please see the data types on https://github.com/Kaggle/kaggle-api/wiki/Dataset-Metadata - originalType: - type: string - description: Used to store the original type of the column, which will be converted to Kaggle's types. 
For example, an `originalType` of `"integer"` would convert to a `type` of `"numeric"` - description: - type: string - description: The description of the column - UploadFile: - type: object - properties: - token: - type: string - description: A token referencing a specific file upload that can be used across requests - description: - type: string - description: The file description - columns: - type: array - description: A list of dataset column metadata - items: - $ref: "#/definitions/DatasetColumn" - DatasetNewVersionRequest: - type: object - required: - - versionNotes - - files - properties: - versionNotes: - type: string - description: The version notes for the new dataset version - subtitle: - type: string - description: The subtitle to set on the dataset - description: - type: string - description: The description to set on the dataset - files: - type: array - description: A list of files that should be associated with the dataset - items: - $ref: "#/definitions/UploadFile" - convertToCsv: - type: boolean - description: Whether or not a tabular dataset should be converted to csv - default: true - categoryIds: - type: array - description: A list of tag IDs to associated with the dataset - items: - type: string - deleteOldVersions: - type: boolean - description: Whether or not all previous versions of the dataset should be deleted upon creating the new version - default: false - DatasetNewRequest: - type: object - required: - - title - - licenseShortName - - files - properties: - title: - type: string - description: The title of the new dataset - slug: - type: string - description: The slug that the dataset should be created with - ownerSlug: - type: string - description: The owner's username - licenseName: - type: string - description: The license that should be associated with the dataset - default: unknown - enum: - - CC0-1.0 - - CC-BY-SA-4.0 - - GPL-2.0 - - ODbL-1.0 - - CC-BY-NC-SA-4.0 - - unknown - - DbCL-1.0 - - CC-BY-SA-3.0 - - copyright-authors - - other - - reddit-api - - world-bank - - CC-BY-4.0 - - CC-BY-NC-4.0 - - PDDL - - CC-BY-3.0 - - CC-BY-3.0-IGO - - US-Government-Works - - CC-BY-NC-SA-3.0-IGO - - CDLA-Permissive-1.0 - - CDLA-Sharing-1.0 - - CC-BY-ND-4.0 - - CC-BY-NC-ND-4.0 - - ODC-BY-1.0 - - LGPL-3.0 - - AGPL-3.0 - - FDL-1.3 - - EU-ODP-Legal-Notice - - apache-2.0 - - GPL-3.0 - subtitle: - type: string - description: The subtitle to be set on the dataset - description: - type: string - description: The description to be set on the dataset - default: "" - files: - type: array - description: A list of files that should be associated with the dataset - items: - $ref: "#/definitions/UploadFile" - isPrivate: - type: boolean - description: Whether or not the dataset should be private - default: true - convertToCsv: - type: boolean - description: Whether or not a tabular dataset should be converted to csv - default: true - categoryIds: - type: array - description: A list of tag IDs to associated with the dataset - items: - type: string - KernelPushRequest: - type: object - required: - - text - - language - - kernelType - properties: - id: - type: integer - description: The kernel's ID number. One of `id` and `slug` are required. If both are specified, `id` will be preferred - slug: - type: string - description: The full slug of the kernel to push to, in the format `USERNAME/KERNEL-SLUG`. The kernel slug must be the title lowercased with dashes (`-`) replacing spaces. One of `id` and `slug` are required. 
If both are specified, `id` will be preferred - newTitle: - type: string - description: The title to be set on the kernel - text: - type: string - description: The kernel's source code - language: - type: string - description: The language that the kernel is written in - enum: - - python - - r - - rmarkdown - kernelType: - type: string - description: The type of kernel. Cannot be changed once the kernel has been created - enum: - - script - - notebook - isPrivate: - type: boolean - description: Whether or not the kernel should be private - enableGpu: - type: boolean - description: Whether or not the kernel should run on a GPU - enableTpu: - type: boolean - description: Whether or not the kernel should run on a TPU - enableInternet: - type: boolean - description: Whether or not the kernel should be able to access the internet - datasetDataSources: - type: array - description: A list of dataset data sources that the kernel should use. Each dataset is specified as `USERNAME/DATASET-SLUG` - items: - type: string - competitionDataSources: - type: array - description: A list of competition data sources that the kernel should use - items: - type: string - kernelDataSources: - type: array - description: A list of kernel data sources that the kernel should use. Each dataset is specified as `USERNAME/KERNEL-SLUG` - items: - type: string - modelDataSources: - type: array - description: A list of model data sources that the kernel should use. Each model is specified as `USERNAME/MODEL-SLUG/FRAMEWORK/VARIATION-SLUG/VERSION-NUMBER` - items: - type: string - categoryIds: - type: array - description: A list of tag IDs to associated with the kernel - items: - type: string - dockerImagePinningType: - type: string - description: Which docker image to use for executing new versions going forward. 
- enum: - - original - - latest - DatasetUpdateSettingsRequest: - type: object - required: - properties: - title: - type: string - description: Title of the dataset - subtitle: - type: string - description: Subtitle of the dataset - description: - type: string - description: Decription of the dataset - isPrivate: - type: boolean - description: Whether or not the dataset should be private - licenses: - type: array - description: A list of licenses that apply to this dataset - items: - type: object - schema: - $ref: "!/definitions/License" - keywords: - type: array - description: A list of keywords that apply to this dataset - items: - type: string - collaborators: - type: array - description: A list of collaborators that may read or edit this dataset - items: - type: object - schema: - $ref: "!definitions/Collaborators" - data: - type: array - description: A list containing metadata for each file in the dataset - items: - type: object - License: - type: object - required: - - name - properties: - name: - type: string - description: Name of the license - enum: - - CC0-1.0 - - CC-BY-SA-4.0 - - GPL-2.0 - - ODbL-1.0 - - CC-BY-NC-SA-4.0 - - unknown - - DbCL-1.0 - - CC-BY-SA-3.0 - - copyright-authors - - other - - reddit-api - - world-bank - - CC-BY-4.0 - - CC-BY-NC-4.0 - - PDDL - - CC-BY-3.0 - - CC-BY-3.0-IGO - - US-Government-Works - - CC-BY-NC-SA-3.0-IGO - - CDLA-Permissive-1.0 - - CDLA-Sharing-1.0 - - CC-BY-ND-4.0 - - CC-BY-NC-ND-4.0 - - ODC-BY-1.0 - - LGPL-3.0 - - AGPL-3.0 - - FDL-1.3 - - EU-ODP-Legal-Notice - - apache-2.0 - - GPL-3.0 - Collaborator: - type: object - required: - - username - - role - properties: - username: - type: string - description: Username of the collaborator - role: - type: string - description: Role of the collaborator - enum: - - reader - - writer - ModelNewRequest: - type: object - required: - - ownerSlug - - slug - - title - - isPrivate - properties: - ownerSlug: - type: string - description: The owner's slug - slug: - type: string - description: The slug that the model should be created with - title: - type: string - description: The title of the new model - subtitle: - type: string - description: The subtitle of the new model - isPrivate: - type: boolean - description: Whether or not the model should be private - default: true - description: - type: string - description: The description to be set on the model - default: "" - publishTime: - type: string - format: date - description: When the model was initially published - provenanceSources: - type: string - description: The provenance sources to be set on the model - default: "" - ModelUpdateRequest: - type: object - properties: - title: - type: string - description: The title of the new model - subtitle: - type: string - description: The subtitle of the new model - isPrivate: - type: boolean - description: Whether or not the model should be private - default: true - description: - type: string - description: The description to be set on the model - default: "" - publishTime: - type: string - format: date - description: When the model was initially published - provenanceSources: - type: string - description: The provenance sources to be set on the model - default: "" - updateMask: - type: string - description: Describes which fields to update - ModelNewInstanceRequest: - type: object - required: - - instanceSlug - - framework - - licenseName - properties: - instanceSlug: - type: string - description: The slug that the model instance should be created with - framework: - type: string - description: The framework 
of the model instance - default: - enum: - - tensorFlow1 - - tensorFlow2 - - tfLite - - tfJs - - pyTorch - - jax - - flax - - pax - - maxText - - gemmaCpp - - tensorRtLlm - - ggml - - gguf - - coral - - scikitLearn - - mxnet - - onnx - - keras - - transformers - - triton - - other - overview: - type: string - description: The overview of the model instance (markdown) - usage: - type: string - description: The description of how to use the model instance (markdown) - licenseName: - type: string - description: The license that should be associated with the model instance - default: Apache 2.0 - # Use go/kaggle-models-license-values-swagger to get the accepted values (both abbreviation & full name are accepted) - enum: - - CC0 1.0 - - CC0: Public Domain - - CC BY-NC-SA 4.0 - - Unknown - - CC BY-SA 4.0 - - GPL 2 - - CC BY-SA 3.0 - - Other - - Other (specified in description) - - CC BY 4.0 - - Attribution 4.0 International (CC BY 4.0) - - CC BY-NC 4.0 - - Attribution-NonCommercial 4.0 International (CC BY-NC 4.0) - - PDDL - - ODC Public Domain Dedication and Licence (PDDL) - - CC BY 3.0 - - Attribution 3.0 Unported (CC BY 3.0) - - CC BY 3.0 IGO - - Attribution 3.0 IGO (CC BY 3.0 IGO) - - CC BY-NC-SA 3.0 IGO - - Attribution-NonCommercial-ShareAlike 3.0 IGO (CC BY-NC-SA 3.0 IGO) - - CDLA Permissive 1.0 - - Community Data License Agreement - Permissive - Version 1.0 - - CDLA Sharing 1.0 - - Community Data License Agreement - Sharing - Version 1.0 - - CC BY-ND 4.0 - - Attribution-NoDerivatives 4.0 International (CC BY-ND 4.0) - - CC BY-NC-ND 4.0 - - Attribution-NonCommercial-NoDerivatives 4.0 International (CC BY-NC-ND 4.0) - - ODC-BY 1.0 - - ODC Attribution License (ODC-By) - - LGPL 3.0 - - GNU Lesser General Public License 3.0 - - AGPL 3.0 - - GNU Affero General Public License 3.0 - - FDL 1.3 - - GNU Free Documentation License 1.3 - - apache-2.0 - - Apache 2.0 - - mit - - MIT - - bsd-3-clause - - BSD-3-Clause - - Llama 2 - - Llama 2 Community License - - Gemma - - gpl-3 - - GPL 3 - - RAIL-M - - AI Pubs Open RAIL-M License - - AIPubs Research-Use RAIL-M - - AI Pubs Research-Use RAIL-M License - - BigScience OpenRAIL-M - - BigScience Open RAIL-M License - - RAIL - - RAIL (specified in description) - - Llama 3 - - Llama 3 Community License - fineTunable: - type: boolean - description: Whether the model instance is fine tunable - default: true - trainingData: - type: array - description: A list of training data (urls or names) - items: - type: string - modelInstanceType: - type: string - description: Whether the model instance is a base model, external variant, internal variant, or unspecified - enum: - - Unspecified - - BaseModel - - KaggleVariant - - ExternalVariant - baseModelInstance: - type: string - description: If this is an internal variant, the `{owner-slug}/{model-slug}/{framework}/{instance-slug}` of the base model instance - externalBaseModelUrl: - type: integer - description: If this is an external variant, a URL to the base model - files: - type: array - description: A list of files that should be associated with the model instance version - items: - $ref: "#/definitions/UploadFile" - ModelInstanceUpdateRequest: - type: object - required: - - updateMask - properties: - overview: - type: string - description: The overview of the model instance (markdown) - usage: - type: string - description: The description of how to use the model instance (markdown) - licenseName: - type: string - description: The license that should be associated with the model instance - default: Apache 2.0 - # Use 
go/kaggle-models-license-values-swagger to get the accepted values (both abbreviation & full name are accepted) - enum: - - CC0 1.0 - - CC0: Public Domain - - CC BY-NC-SA 4.0 - - Unknown - - CC BY-SA 4.0 - - GPL 2 - - CC BY-SA 3.0 - - Other - - Other (specified in description) - - CC BY 4.0 - - Attribution 4.0 International (CC BY 4.0) - - CC BY-NC 4.0 - - Attribution-NonCommercial 4.0 International (CC BY-NC 4.0) - - PDDL - - ODC Public Domain Dedication and Licence (PDDL) - - CC BY 3.0 - - Attribution 3.0 Unported (CC BY 3.0) - - CC BY 3.0 IGO - - Attribution 3.0 IGO (CC BY 3.0 IGO) - - CC BY-NC-SA 3.0 IGO - - Attribution-NonCommercial-ShareAlike 3.0 IGO (CC BY-NC-SA 3.0 IGO) - - CDLA Permissive 1.0 - - Community Data License Agreement - Permissive - Version 1.0 - - CDLA Sharing 1.0 - - Community Data License Agreement - Sharing - Version 1.0 - - CC BY-ND 4.0 - - Attribution-NoDerivatives 4.0 International (CC BY-ND 4.0) - - CC BY-NC-ND 4.0 - - Attribution-NonCommercial-NoDerivatives 4.0 International (CC BY-NC-ND 4.0) - - ODC-BY 1.0 - - ODC Attribution License (ODC-By) - - LGPL 3.0 - - GNU Lesser General Public License 3.0 - - AGPL 3.0 - - GNU Affero General Public License 3.0 - - FDL 1.3 - - GNU Free Documentation License 1.3 - - apache-2.0 - - Apache 2.0 - - mit - - MIT - - bsd-3-clause - - BSD-3-Clause - - Llama 2 - - Llama 2 Community License - - Gemma - - gpl-3 - - GPL 3 - - RAIL-M - - AI Pubs Open RAIL-M License - - AIPubs Research-Use RAIL-M - - AI Pubs Research-Use RAIL-M License - - BigScience OpenRAIL-M - - BigScience Open RAIL-M License - - RAIL - - RAIL (specified in description) - - Llama 3 - - Llama 3 Community License - fineTunable: - type: boolean - description: Whether the model instance is fine tunable - default: true - trainingData: - type: array - description: A list of training data (urls or names) - items: - type: string - modelInstanceType: - type: string - description: Whether the model instance is a base model, external variant, internal variant, or unspecified - enum: - - Unspecified - - BaseModel - - KaggleVariant - - ExternalVariant - baseModelInstance: - type: string - description: If this is an internal variant, the `{owner-slug}/{model-slug}/{framework}/{instance-slug}` of the base model instance - externalBaseModelUrl: - type: integer - description: If this is an external variant, a URL to the base model - updateMask: - type: string - description: Describes which fields to update - ModelInstanceNewVersionRequest: - type: object - required: - - files - properties: - versionNotes: - type: string - description: The version notes for the model instance version - files: - type: array - description: A list of files that should be associated with the model instance version - items: - $ref: "#/definitions/UploadFile" - StartBlobUploadRequest: - type: object - required: - - bucket - - name - - contentLength - - lastUpdateTime - properties: - type: - type: object - description: The type of the blob (one of "dataset", "model", "inbox") - schema: - $ref: "#definitions/ApiBlobType" - name: - type: string - description: Name of the file - contentLength: - type: integer - description: Content length of the file in bytes - contentType: - type: string - description: Content/MIME type (e.g. 
"text/plain") of the file - lastModifiedEpochSeconds: - type: integer - description: Last modified date of file in seconds since epoch in UTC - ApiBlobType: - type: string - description: To which entity this blob refers - enum: - - dataset - - model - - inbox - StartBlobUploadResponse: - type: object - required: - - token - - createUrl - properties: - token: - type: string - description: Opaque string token used to reference the new blob/file. - createUrl: - type: string - description: URL to use to start the upload. - CreateInboxFileRequest: - type: object - required: - - virtualDirectory - - blobFileToken - properties: - virtualDirectory: - type: string - description: Directory name used for tagging the uploaded file - blobFileToken: - type: string - description: Token representing the uploaded file diff --git a/src/KaggleSwaggerConfig.json b/src/KaggleSwaggerConfig.json deleted file mode 100644 index 4b3dd88..0000000 --- a/src/KaggleSwaggerConfig.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "projectName": "kaggle", - "packageName" : "kaggle", - "packageVersion" : "1" -} diff --git a/src/kaggle/__init__.py b/src/kaggle/__init__.py index 94a24d9..144129b 100644 --- a/src/kaggle/__init__.py +++ b/src/kaggle/__init__.py @@ -1,7 +1,6 @@ # coding=utf-8 from __future__ import absolute_import from kaggle.api.kaggle_api_extended import KaggleApi -from kaggle.api_client import ApiClient -api = KaggleApi(ApiClient()) +api = KaggleApi() api.authenticate() diff --git a/src/kaggle/api/kaggle_api_extended.py b/src/kaggle/api/kaggle_api_extended.py index 9ba3583..c9a8136 100644 --- a/src/kaggle/api/kaggle_api_extended.py +++ b/src/kaggle/api/kaggle_api_extended.py @@ -1,6 +1,6 @@ #!/usr/bin/python # -# Copyright 2019 Kaggle Inc +# Copyright 2024 Kaggle Inc # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -32,6 +32,10 @@ import bleach import requests import urllib3.exceptions as urllib3_exceptions +from requests import RequestException + +from kaggle.models.kaggle_models_extended import ResumableUploadResult, File + from requests.adapters import HTTPAdapter from slugify import slugify from tqdm import tqdm @@ -40,6 +44,9 @@ from kaggle.configuration import Configuration from kagglesdk import KaggleClient, KaggleEnv +from kagglesdk.admin.types.inbox_file_service import CreateInboxFileRequest +from kagglesdk.blobs.types.blob_api_service import ApiStartBlobUploadRequest, \ + ApiStartBlobUploadResponse, ApiBlobType from kagglesdk.competitions.types.competition_api_service import * from kagglesdk.datasets.types.dataset_api_service import ApiListDatasetsRequest, \ ApiListDatasetFilesRequest, \ @@ -51,7 +58,7 @@ from kagglesdk.datasets.types.dataset_enums import DatasetSelectionGroup, \ DatasetSortBy, DatasetFileTypeGroup, DatasetLicenseGroup from kagglesdk.datasets.types.dataset_types import DatasetSettings, \ - SettingsLicense, DatasetCollaborator, DatasetSettingsFile + SettingsLicense, DatasetCollaborator from kagglesdk.kernels.types.kernels_api_service import ApiListKernelsRequest, \ ApiListKernelFilesRequest, ApiSaveKernelRequest, ApiGetKernelRequest, \ ApiListKernelSessionOutputRequest, ApiGetKernelSessionStatusRequest @@ -67,36 +74,8 @@ ApiDownloadModelInstanceVersionRequest, ApiDeleteModelInstanceVersionRequest from kagglesdk.models.types.model_enums import ListModelsOrderBy, \ ModelInstanceType, ModelFramework -from .kaggle_api import KaggleApi -from ..api_client import ApiClient -from ..models.api_blob_type import ApiBlobType -from ..models.collaborator import Collaborator -from ..models.create_inbox_file_request import CreateInboxFileRequest from ..models.dataset_column import DatasetColumn -from ..models.dataset_new_request import DatasetNewRequest -from ..models.dataset_update_settings_request import DatasetUpdateSettingsRequest -from ..models.kaggle_models_extended import DatasetNewResponse -from ..models.kaggle_models_extended import DatasetNewVersionResponse -from ..models.kaggle_models_extended import File -from ..models.kaggle_models_extended import Kernel -from ..models.kaggle_models_extended import KernelPushResponse -from ..models.kaggle_models_extended import ListFilesResult -from ..models.kaggle_models_extended import Metadata -from ..models.kaggle_models_extended import Model -from ..models.kaggle_models_extended import ModelDeleteResponse -from ..models.kaggle_models_extended import ModelNewResponse -from ..models.kaggle_models_extended import ResumableUploadResult -from ..models.kernel_push_request import KernelPushRequest -from ..models.license import License -from ..models.model_instance_new_version_request import ModelInstanceNewVersionRequest -from ..models.model_instance_update_request import ModelInstanceUpdateRequest -from ..models.model_new_instance_request import ModelNewInstanceRequest -from ..models.model_new_request import ModelNewRequest -from ..models.model_update_request import ModelUpdateRequest -from ..models.start_blob_upload_request import StartBlobUploadRequest -from ..models.start_blob_upload_response import StartBlobUploadResponse from ..models.upload_file import UploadFile -from ..rest import ApiException class DirectoryArchive(object): @@ -250,13 +229,16 @@ def to_dict(self): } def from_dict(other, context): + req = ApiStartBlobUploadRequest() + req.from_dict(other['start_blob_upload_request']) new = ResumableFileUpload( other['path'], - 
StartBlobUploadRequest(**other['start_blob_upload_request']), context) + ApiStartBlobUploadRequest(**other['start_blob_upload_request']), + context) new.timestamp = other.get('timestamp') start_blob_upload_response = other.get('start_blob_upload_response') if start_blob_upload_response is not None: - new.start_blob_upload_response = StartBlobUploadResponse( + new.start_blob_upload_response = ApiStartBlobUploadResponse( **start_blob_upload_response) new.upload_complete = other.get('upload_complete') or False return new @@ -268,7 +250,7 @@ def __repr__(self): return self.to_str() -class KaggleApi(KaggleApi): +class KaggleApi: __version__ = '1.6.17' CONFIG_NAME_PROXY = 'proxy' @@ -370,13 +352,13 @@ class KaggleApi(KaggleApi): 'ref', 'title', 'size', 'lastUpdated', 'downloadCount', 'voteCount', 'usabilityRating' ] - dataset_file_fields = ['name', 'size', - 'creationDate'] # TODO databundle_file_files? + dataset_file_fields = ['name', 'total_bytes', 'creationDate'] model_fields = ['id', 'ref', 'title', 'subtitle', 'author'] model_all_fields = [ 'id', 'ref', 'author', 'slug', 'title', 'subtitle', 'isPrivate', 'description', 'publishTime' ] + model_file_fields = ['name', 'size', 'creationDate'] def _is_retriable(self, e): return issubclass(type(e), ConnectionError) or \ @@ -525,22 +507,6 @@ def _load_config(self, config_data): self.config_values = config_data - try: - self.api_client = ApiClient(configuration) - - except Exception as error: - - if 'Proxy' in type(error).__name__: - raise ValueError('The specified proxy ' + - config_data[self.CONFIG_NAME_PROXY] + - ' is not valid, please check your proxy settings') - else: - raise ValueError( - 'Unauthorized: you must download an API key or export ' - 'credentials to the environment. Please see\n ' + - 'https://github.com/Kaggle/kaggle-api#api-credentials ' + - 'for instructions.') - def read_config_file(self, config_data=None, quiet=False): """read_config_file is the first effort to get a username and key to authenticate to the Kaggle API. Since we can get the @@ -711,12 +677,12 @@ def build_kaggle_client(self): else KaggleEnv.LOCAL if '--local' in self.args \ else KaggleEnv.PROD verbose = '--verbose' in self.args or '-v' in self.args - config = self.api_client.configuration + # config = self.api_client.configuration return KaggleClient( env=env, verbose=verbose, - username=config.username, - password=config.password) + username=self.config_values['username'], + password=self.config_values['key']) def camel_to_snake(self, name): """ @@ -765,7 +731,7 @@ def competitions_list(self, raise ValueError('Invalid group specified. 
Valid options are ' + str(self.valid_competition_groups)) if group == 'all': - group = CompetitionListTab.COMPETITION_LIST_TAB_DEFAULT + group = CompetitionListTab.COMPETITION_LIST_TAB_EVERYTHING else: group = self.lookup_enum(CompetitionListTab, group) @@ -886,8 +852,8 @@ def competition_submit_cli(self, try: submit_result = self.competition_submit(file_name, message, competition, quiet) - except ApiException as e: - if e.status == 404: + except RequestException as e: + if e.response and e.response.status_code == 404: print('Could not find competition - please verify that you ' 'entered the correct competition ID and that the ' 'competition is still accepting submissions.') @@ -1052,7 +1018,8 @@ def competition_download_file(self, outfile = os.path.join(effective_path, url.split('?')[0].split('/')[-1]) if force or self.download_needed(response, outfile, quiet): - self.download_file(response, outfile, quiet, not force) + self.download_file(response, outfile, kaggle.http_client(), quiet, + not force) def competition_download_files(self, competition, @@ -1362,25 +1329,43 @@ def dataset_metadata_update(self, dataset, path): with open(meta_file, 'r') as f: s = json.load(f) metadata = json.loads(s) - updateSettingsRequest = DatasetUpdateSettingsRequest( - title=metadata.get('title') or '', - subtitle=metadata.get('subtitle') or '', - description=metadata.get('description') or '', - is_private=metadata.get('isPrivate') or False, - licenses=[License(name=l['name']) for l in metadata['licenses']] - if metadata.get('licenses') else [], - keywords=metadata.get('keywords'), - collaborators=[ - Collaborator(username=c['username'], role=c['role']) - for c in metadata['collaborators'] - ] if metadata.get('collaborators') else [], - data=metadata.get('data')) - result = self.process_response( - self.metadata_post_with_http_info(owner_slug, dataset_slug, - updateSettingsRequest)) - if len(result['errors']) > 0: - [print(e['message']) for e in result['errors']] - exit(1) + update_settings = DatasetSettings() + update_settings.title = metadata.get('title') or '' + update_settings.subtitle = metadata.get('subtitle') or '' + update_settings.description = metadata.get('description') or '' + update_settings.is_private = metadata.get('isPrivate') or False + update_settings.licenses = [ + self._new_license(l['name']) for l in metadata['licenses'] + ] if metadata.get('licenses') else [] + update_settings.keywords = metadata.get('keywords') + update_settings.collaborators = [ + self._new_collaborator(c['username'], c['role']) + for c in metadata['collaborators'] + ] if metadata.get('collaborators') else [] + update_settings.data = metadata.get('data') + request = ApiUpdateDatasetMetadataRequest() + request.owner_slug = owner_slug + request.dataset_slug = dataset_slug + request.settings = update_settings + with self.build_kaggle_client() as kaggle: + response = kaggle.datasets.dataset_api_client.update_dataset_metadata( + request) + if len(response.errors) > 0: + [print(e['message']) for e in response.errors] + exit(1) + + @staticmethod + def _new_license(name): + l = SettingsLicense() + l.name = name + return l + + @staticmethod + def _new_collaborator(name, role): + u = DatasetCollaborator() + u.username = name + u.role = role + return u def dataset_metadata(self, dataset, path): (owner_slug, dataset_slug, @@ -1441,7 +1426,7 @@ def dataset_list_files(self, dataset, page_token=None, page_size=20): request.page_token = page_token request.page_size = page_size response = 
kaggle.datasets.dataset_api_client.list_dataset_files(request) - return ListFilesResult(response) + return response def dataset_list_files_cli(self, dataset, @@ -1467,7 +1452,7 @@ def dataset_list_files_cli(self, if result.error_message: print(result.error_message) else: - next_page_token = result.nextPageToken + next_page_token = result.next_page_token if next_page_token: print('Next Page Token = {}'.format(next_page_token)) fields = ['name', 'size', 'creationDate'] @@ -1724,11 +1709,11 @@ def _upload_blob(self, path, quiet, blob_type, upload_context): content_length = os.path.getsize(path) last_modified_epoch_seconds = int(os.path.getmtime(path)) - start_blob_upload_request = StartBlobUploadRequest( - blob_type, - file_name, - content_length, - last_modified_epoch_seconds=last_modified_epoch_seconds) + start_blob_upload_request = ApiStartBlobUploadRequest() + start_blob_upload_request.type = blob_type + start_blob_upload_request.name = file_name + start_blob_upload_request.content_length = content_length + start_blob_upload_request.last_modified_epoch_seconds = last_modified_epoch_seconds file_upload = upload_context.new_resumable_file_upload( path, start_blob_upload_request) @@ -1739,10 +1724,11 @@ def _upload_blob(self, path, quiet, blob_type, upload_context): if not file_upload.can_resume: # Initiate upload on Kaggle backend to get the url and token. - start_blob_upload_response = self.process_response( - self.with_retry(self.upload_file_with_http_info)( - file_upload.start_blob_upload_request)) - file_upload.upload_initiated(start_blob_upload_response) + with self.build_kaggle_client() as kaggle: + method = kaggle.blobs.blob_api_client.start_blob_upload + start_blob_upload_response = self.with_retry(method)( + file_upload.start_blob_upload_request) + file_upload.upload_initiated(start_blob_upload_response) upload_result = self.upload_complete( path, @@ -1962,20 +1948,19 @@ def dataset_create_new(self, if subtitle and (len(subtitle) < 20 or len(subtitle) > 80): raise ValueError('Subtitle length must be between 20 and 80 characters') - request = DatasetNewRequest( - title=title, - slug=dataset_slug, - owner_slug=owner_slug, - license_name=license_name, - subtitle=subtitle, - description=description, - files=[], - is_private=not public, - convert_to_csv=convert_to_csv, - category_ids=keywords) + request = ApiCreateDatasetRequest() + request.title = title + request.slug = dataset_slug + request.owner_slug = owner_slug + request.license_name = license_name + request.subtitle = subtitle + request.description = description + request.files = [] + request.is_private = not public + # request.convert_to_csv=convert_to_csv + request.category_ids = keywords with ResumableUploadContext() as upload_context: - # TODO Change upload_files() to use ApiCreateDatasetRequest self.upload_files(request, resources, folder, ApiBlobType.DATASET, upload_context, quiet, dir_mode) @@ -2031,6 +2016,7 @@ def dataset_create_new_cli(self, def download_file(self, response, outfile, + http_client, quiet=True, resume=False, chunk_size=1048576): @@ -2040,6 +2026,7 @@ def download_file(self, ========== response: the response to download outfile: the output file to download to + http_client: the Kaggle http client to use quiet: suppress verbose output (default is True) chunk_size: the size of the chunk to stream resume: whether to resume an existing download @@ -2076,10 +2063,10 @@ def download_file(self, size - size_read, )) - request_history = response.retries.history[0] - response = self.api_client.request( - 
request_history.method, - request_history.redirect_location, + request_history = response.history[0] + response = http_client.call( + request_history.request.method, + request_history.headers['location'], headers={'Range': 'bytes=%d-' % (size_read,)}, _preload_content=False) @@ -2837,7 +2824,7 @@ def model_get_cli(self, model, folder=None): data['slug'] = model_ref_split[1] data['title'] = model.title data['subtitle'] = model.subtitle - data['isPrivate'] = model.isPrivate # TODO Add a test to ensure default is True + data['isPrivate'] = model.isPrivate # TODO Test to ensure True default data['description'] = model.description data['publishTime'] = model.publishTime @@ -3611,8 +3598,6 @@ def model_instance_version_create(self, owner_slug, model_slug, framework, instance_slug = self.split_model_instance_string( model_instance) - request = ModelInstanceNewVersionRequest( - version_notes=version_notes, files=[]) request = ApiCreateModelInstanceVersionRequest() request.owner_slug = owner_slug request.model_slug = model_slug @@ -3885,14 +3870,16 @@ def files_upload_cli(self, local_paths, inbox_path, no_resume, no_compress): if upload_file is None: continue - create_inbox_file_request = CreateInboxFileRequest( - virtual_directory=inbox_path, blob_file_token=upload_file.token) + create_inbox_file_request = CreateInboxFileRequest() + create_inbox_file_request.virtual_directory = inbox_path + create_inbox_file_request.blob_file_token = upload_file.token files_to_create.append((create_inbox_file_request, file_name)) - for (create_inbox_file_request, file_name) in files_to_create: - self.process_response( - self.with_retry(self.create_inbox_file)(create_inbox_file_request)) - print('Inbox file created:', file_name) + with self.build_kaggle_client() as kaggle: + create_inbox_file = kaggle.admin.inbox_file_client.create_inbox_file + for (create_inbox_file_request, file_name) in files_to_create: + self.with_retry(create_inbox_file)(create_inbox_file_request) + print('Inbox file created:', file_name) def file_upload_cli(self, local_path, inbox_path, no_compress, upload_context): @@ -4073,9 +4060,9 @@ def process_response(self, result): api_version = headers[self.HEADER_API_VERSION] if (not self.already_printed_version_warning and not self.is_up_to_date(api_version)): - print('Warning: Looks like you\'re using an outdated API ' - 'Version, please consider updating (server ' + api_version + - ' / client ' + self.__version__ + ')') + print(f'Warning: Looks like you\'re using an outdated `kaggle`` ' + 'version (installed: {self.__version__}, please consider ' + 'upgrading to the latest version ({api_version})') self.already_printed_version_warning = True if isinstance(data, dict) and 'code' in data and data['code'] != 200: raise Exception(data['message']) diff --git a/src/kaggle/cli.py b/src/kaggle/cli.py index 1ad8d7b..ac927f6 100644 --- a/src/kaggle/cli.py +++ b/src/kaggle/cli.py @@ -24,7 +24,8 @@ from kaggle import KaggleApi from kaggle import api -from .rest import ApiException +# from rest import ApiException +ApiException = IOError def main(): diff --git a/src/kaggle/configuration.py b/src/kaggle/configuration.py new file mode 100644 index 0000000..356cc00 --- /dev/null +++ b/src/kaggle/configuration.py @@ -0,0 +1,208 @@ +#!/usr/bin/python +# +# Copyright 2024 Kaggle Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 +from __future__ import absolute_import + +import logging +import six +import sys +import urllib3 + + +class Configuration: + + def __init__(self): + """Constructor""" + # Default Base url + self.host = _get_endpoint_from_env() or "https://www.kaggle.com/api/v1" + + # Authentication Settings + # dict to store API key(s) + self.api_key = {} + # dict to store API prefix (e.g. Bearer) + self.api_key_prefix = {} + # Username for HTTP basic authentication + self.username = "" + # Password for HTTP basic authentication + self.password = "" + + # Logging Settings + self.logger = {"package_logger": logging.getLogger("kaggle"), + "urllib3_logger": logging.getLogger("urllib3")} + # Log format + self.logger_format = '%(asctime)s %(levelname)s %(message)s' + # Log stream handler + self.logger_stream_handler = None + # Log file handler + self.logger_file_handler = None + # Debug file location + self.logger_file = None + # Debug switch + self.debug = False + + + @property + def logger_file(self): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + return self.__logger_file + + @logger_file.setter + def logger_file(self, value): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + self.__logger_file = value + if self.__logger_file: + # If set logging file, + # then add file handler and remove stream handler. + self.logger_file_handler = logging.FileHandler(self.__logger_file) + self.logger_file_handler.setFormatter(self.logger_formatter) + for _, logger in six.iteritems(self.logger): + logger.addHandler(self.logger_file_handler) + if self.logger_stream_handler: + logger.removeHandler(self.logger_stream_handler) + else: + # If not set logging file, + # then add stream handler and remove file handler. + self.logger_stream_handler = logging.StreamHandler() + self.logger_stream_handler.setFormatter(self.logger_formatter) + for _, logger in six.iteritems(self.logger): + logger.addHandler(self.logger_stream_handler) + if self.logger_file_handler: + logger.removeHandler(self.logger_file_handler) + + @property + def debug(self): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + return self.__debug + + @debug.setter + def debug(self, value): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + self.__debug = value + if self.__debug: + # if debug status is True, turn on debug logging + for _, logger in six.iteritems(self.logger): + logger.setLevel(logging.DEBUG) + else: + # if debug status is False, turn off debug logging, + # setting log level to default `logging.WARNING` + for _, logger in six.iteritems(self.logger): + logger.setLevel(logging.WARNING) + + @property + def logger_format(self): + """The logger format. 
+ + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. + :type: str + """ + return self.__logger_format + + @logger_format.setter + def logger_format(self, value): + """The logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. + :type: str + """ + self.__logger_format = value + self.logger_formatter = logging.Formatter(self.__logger_format) + + def get_api_key_with_prefix(self, identifier): + """Gets API key (with prefix if set). + + :param identifier: The identifier of apiKey. + :return: The token for api key authentication. + """ + if (self.api_key.get(identifier) and + self.api_key_prefix.get(identifier)): + return self.api_key_prefix[identifier] + ' ' + self.api_key[identifier] # noqa: E501 + elif self.api_key.get(identifier): + return self.api_key[identifier] + + def get_basic_auth_token(self): + """Gets HTTP basic authentication header (string). + + :return: The token for basic HTTP authentication. + """ + return urllib3.util.make_headers( + basic_auth=self.username + ':' + self.password + ).get('authorization') + + def auth_settings(self): + """Gets Auth Settings dict for api client. + + :return: The Auth Settings information dict. + """ + return { + 'basicAuth': + { + 'type': 'basic', + 'in': 'header', + 'key': 'Authorization', + 'value': self.get_basic_auth_token() + }, + + } + + def to_debug_report(self): + """Gets the essential information for debugging. + + :return: The report for debugging. + """ + return "Python SDK Debug Report:\n" \ + "OS: {env}\n" \ + "Python Version: {pyversion}\n" \ + "Version of the API: 1\n" \ + "SDK Package Version: 1". \ + format(env=sys.platform, pyversion=sys.version) + + +def _get_endpoint_from_env(): + import os + endpoint = os.environ.get("KAGGLE_API_ENDPOINT") + if endpoint is None: + return None + endpoint = endpoint.rstrip("/") + if endpoint.endswith("/api/v1"): + return endpoint + return endpoint + "/api/v1" diff --git a/src/kaggle/models/api_blob_type.py b/src/kaggle/models/api_blob_type.py new file mode 100644 index 0000000..1c4c95e --- /dev/null +++ b/src/kaggle/models/api_blob_type.py @@ -0,0 +1,4 @@ +class ApiBlobType(object): + DATASET = "dataset" + MODEL = "model" + INBOX = "inbox" diff --git a/src/kaggle/models/dataset_column.py b/src/kaggle/models/dataset_column.py new file mode 100644 index 0000000..5e89b3f --- /dev/null +++ b/src/kaggle/models/dataset_column.py @@ -0,0 +1,227 @@ +#!/usr/bin/python +# +# Copyright 2024 Kaggle Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + + +import pprint +import re # noqa: F401 + +import six + + +class DatasetColumn(object): + """ + Attributes: + column_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + column_types = { + 'order': 'float', + 'name': 'str', + 'type': 'str', + 'original_type': 'str', + 'description': 'str' + } + + attribute_map = { + 'order': 'order', + 'name': 'name', + 'type': 'type', + 'original_type': 'originalType', + 'description': 'description' + } + + def __init__(self, order=None, name=None, type=None, original_type=None, description=None): # noqa: E501 + """DatasetColumn - a model defined in Swagger""" # noqa: E501 + + self._order = None + self._name = None + self._type = None + self._original_type = None + self._description = None + self.discriminator = None + + if order is not None: + self.order = order + if name is not None: + self.name = name + if type is not None: + self.type = type + if original_type is not None: + self.original_type = original_type + if description is not None: + self.description = description + + @property + def order(self): + """Gets the order of this DatasetColumn. # noqa: E501 + + The order that the column comes in, 0-based. (The first column is 0, second is 1, etc.) # noqa: E501 + + :return: The order of this DatasetColumn. # noqa: E501 + :rtype: float + """ + return self._order + + @order.setter + def order(self, order): + """Sets the order of this DatasetColumn. + + The order that the column comes in, 0-based. (The first column is 0, second is 1, etc.) # noqa: E501 + + :param order: The order of this DatasetColumn. # noqa: E501 + :type: float + """ + + self._order = order + + @property + def name(self): + """Gets the name of this DatasetColumn. # noqa: E501 + + The column name # noqa: E501 + + :return: The name of this DatasetColumn. # noqa: E501 + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this DatasetColumn. + + The column name # noqa: E501 + + :param name: The name of this DatasetColumn. # noqa: E501 + :type: str + """ + + self._name = name + + @property + def type(self): + """Gets the type of this DatasetColumn. # noqa: E501 + + The type of all of the fields in the column. Please see the data types on https://github.com/Kaggle/kaggle-api/wiki/Dataset-Metadata # noqa: E501 + + :return: The type of this DatasetColumn. # noqa: E501 + :rtype: str + """ + return self._type + + @type.setter + def type(self, type): + """Sets the type of this DatasetColumn. + + The type of all of the fields in the column. Please see the data types on https://github.com/Kaggle/kaggle-api/wiki/Dataset-Metadata # noqa: E501 + + :param type: The type of this DatasetColumn. # noqa: E501 + :type: str + """ + + self._type = type + + @property + def original_type(self): + """Gets the original_type of this DatasetColumn. # noqa: E501 + + Used to store the original type of the column, which will be converted to Kaggle's types. For example, an `originalType` of `\"integer\"` would convert to a `type` of `\"numeric\"` # noqa: E501 + + :return: The original_type of this DatasetColumn. # noqa: E501 + :rtype: str + """ + return self._original_type + + @original_type.setter + def original_type(self, original_type): + """Sets the original_type of this DatasetColumn. + + Used to store the original type of the column, which will be converted to Kaggle's types. For example, an `originalType` of `\"integer\"` would convert to a `type` of `\"numeric\"` # noqa: E501 + + :param original_type: The original_type of this DatasetColumn. # noqa: E501 + :type: str + """ + + self._original_type = original_type + + @property + def description(self): + """Gets the description of this DatasetColumn. 
# noqa: E501 + + The description of the column # noqa: E501 + + :return: The description of this DatasetColumn. # noqa: E501 + :rtype: str + """ + return self._description + + @description.setter + def description(self, description): + """Sets the description of this DatasetColumn. + + The description of the column # noqa: E501 + + :param description: The description of this DatasetColumn. # noqa: E501 + :type: str + """ + + self._description = description + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.column_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, DatasetColumn): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Returns true if both objects are not equal""" + return not self == other diff --git a/src/kaggle/models/start_blob_upload_request.py b/src/kaggle/models/start_blob_upload_request.py new file mode 100644 index 0000000..4c78202 --- /dev/null +++ b/src/kaggle/models/start_blob_upload_request.py @@ -0,0 +1,244 @@ +#!/usr/bin/python +# +# Copyright 2024 Kaggle Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# coding: utf-8 + +""" + Kaggle API + + API for kaggle.com # noqa: E501 + + OpenAPI spec version: 1 + + Generated by: https://github.com/swagger-api/swagger-codegen.git +""" + + +import pprint +import re # noqa: F401 + +import six + + +class StartBlobUploadRequest(object): + """NOTE: This class is auto generated by the swagger code generator program. + + Do not edit the class manually. + """ + + """ + Attributes: + swagger_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. 
+ """ + swagger_types = { + 'type': 'object', + 'name': 'str', + 'content_length': 'int', + 'content_type': 'str', + 'last_modified_epoch_seconds': 'int' + } + + attribute_map = { + 'type': 'type', + 'name': 'name', + 'content_length': 'contentLength', + 'content_type': 'contentType', + 'last_modified_epoch_seconds': 'lastModifiedEpochSeconds' + } + + def __init__(self, type=None, name=None, content_length=None, content_type=None, last_modified_epoch_seconds=None): # noqa: E501 + """StartBlobUploadRequest - a model defined in Swagger""" # noqa: E501 + + self._type = None + self._name = None + self._content_length = None + self._content_type = None + self._last_modified_epoch_seconds = None + self.discriminator = None + + if type is not None: + self.type = type + self.name = name + self.content_length = content_length + if content_type is not None: + self.content_type = content_type + if last_modified_epoch_seconds is not None: + self.last_modified_epoch_seconds = last_modified_epoch_seconds + + @property + def type(self): + """Gets the type of this StartBlobUploadRequest. # noqa: E501 + + The type of the blob (one of \"dataset\", \"model\", \"inbox\") # noqa: E501 + + :return: The type of this StartBlobUploadRequest. # noqa: E501 + :rtype: object + """ + return self._type + + @type.setter + def type(self, type): + """Sets the type of this StartBlobUploadRequest. + + The type of the blob (one of \"dataset\", \"model\", \"inbox\") # noqa: E501 + + :param type: The type of this StartBlobUploadRequest. # noqa: E501 + :type: object + """ + + self._type = type + + @property + def name(self): + """Gets the name of this StartBlobUploadRequest. # noqa: E501 + + Name of the file # noqa: E501 + + :return: The name of this StartBlobUploadRequest. # noqa: E501 + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this StartBlobUploadRequest. + + Name of the file # noqa: E501 + + :param name: The name of this StartBlobUploadRequest. # noqa: E501 + :type: str + """ + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + + self._name = name + + @property + def content_length(self): + """Gets the content_length of this StartBlobUploadRequest. # noqa: E501 + + Content length of the file in bytes # noqa: E501 + + :return: The content_length of this StartBlobUploadRequest. # noqa: E501 + :rtype: int + """ + return self._content_length + + @content_length.setter + def content_length(self, content_length): + """Sets the content_length of this StartBlobUploadRequest. + + Content length of the file in bytes # noqa: E501 + + :param content_length: The content_length of this StartBlobUploadRequest. # noqa: E501 + :type: int + """ + if content_length is None: + raise ValueError("Invalid value for `content_length`, must not be `None`") # noqa: E501 + + self._content_length = content_length + + @property + def content_type(self): + """Gets the content_type of this StartBlobUploadRequest. # noqa: E501 + + Content/MIME type (e.g. \"text/plain\") of the file # noqa: E501 + + :return: The content_type of this StartBlobUploadRequest. # noqa: E501 + :rtype: str + """ + return self._content_type + + @content_type.setter + def content_type(self, content_type): + """Sets the content_type of this StartBlobUploadRequest. + + Content/MIME type (e.g. \"text/plain\") of the file # noqa: E501 + + :param content_type: The content_type of this StartBlobUploadRequest. 
# noqa: E501 + :type: str + """ + + self._content_type = content_type + + @property + def last_modified_epoch_seconds(self): + """Gets the last_modified_epoch_seconds of this StartBlobUploadRequest. # noqa: E501 + + Last modified date of file in seconds since epoch in UTC # noqa: E501 + + :return: The last_modified_epoch_seconds of this StartBlobUploadRequest. # noqa: E501 + :rtype: int + """ + return self._last_modified_epoch_seconds + + @last_modified_epoch_seconds.setter + def last_modified_epoch_seconds(self, last_modified_epoch_seconds): + """Sets the last_modified_epoch_seconds of this StartBlobUploadRequest. + + Last modified date of file in seconds since epoch in UTC # noqa: E501 + + :param last_modified_epoch_seconds: The last_modified_epoch_seconds of this StartBlobUploadRequest. # noqa: E501 + :type: int + """ + + self._last_modified_epoch_seconds = last_modified_epoch_seconds + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.swagger_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, StartBlobUploadRequest): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Returns true if both objects are not equal""" + return not self == other diff --git a/kaggle/models/collaborator.py b/src/kaggle/models/start_blob_upload_response.py similarity index 58% rename from kaggle/models/collaborator.py rename to src/kaggle/models/start_blob_upload_response.py index 16c5d6c..75adc58 100644 --- a/kaggle/models/collaborator.py +++ b/src/kaggle/models/start_blob_upload_response.py @@ -33,7 +33,7 @@ import six -class Collaborator(object): +class StartBlobUploadResponse(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. @@ -47,80 +47,74 @@ class Collaborator(object): and the value is json key in definition. """ swagger_types = { - 'username': 'str', - 'role': 'str' + 'token': 'str', + 'create_url': 'str' } attribute_map = { - 'username': 'username', - 'role': 'role' + 'token': 'token', + 'create_url': 'createUrl' } - def __init__(self, username=None, role=None): # noqa: E501 - """Collaborator - a model defined in Swagger""" # noqa: E501 + def __init__(self, token=None, create_url=None): # noqa: E501 + """StartBlobUploadResponse - a model defined in Swagger""" # noqa: E501 - self._username = None - self._role = None + self._token = None + self._create_url = None self.discriminator = None - self.username = username - self.role = role + self.token = token + self.create_url = create_url @property - def username(self): - """Gets the username of this Collaborator. # noqa: E501 + def token(self): + """Gets the token of this StartBlobUploadResponse. 
# noqa: E501 - Username of the collaborator # noqa: E501 + Opaque string token used to reference the new blob/file. # noqa: E501 - :return: The username of this Collaborator. # noqa: E501 + :return: The token of this StartBlobUploadResponse. # noqa: E501 :rtype: str """ - return self._username + return self._token - @username.setter - def username(self, username): - """Sets the username of this Collaborator. + @token.setter + def token(self, token): + """Sets the token of this StartBlobUploadResponse. - Username of the collaborator # noqa: E501 + Opaque string token used to reference the new blob/file. # noqa: E501 - :param username: The username of this Collaborator. # noqa: E501 + :param token: The token of this StartBlobUploadResponse. # noqa: E501 :type: str """ - if username is None: - raise ValueError("Invalid value for `username`, must not be `None`") # noqa: E501 + if token is None: + raise ValueError("Invalid value for `token`, must not be `None`") # noqa: E501 - self._username = username + self._token = token @property - def role(self): - """Gets the role of this Collaborator. # noqa: E501 + def create_url(self): + """Gets the create_url of this StartBlobUploadResponse. # noqa: E501 - Role of the collaborator # noqa: E501 + URL to use to start the upload. # noqa: E501 - :return: The role of this Collaborator. # noqa: E501 + :return: The create_url of this StartBlobUploadResponse. # noqa: E501 :rtype: str """ - return self._role + return self._create_url - @role.setter - def role(self, role): - """Sets the role of this Collaborator. + @create_url.setter + def create_url(self, create_url): + """Sets the create_url of this StartBlobUploadResponse. - Role of the collaborator # noqa: E501 + URL to use to start the upload. # noqa: E501 - :param role: The role of this Collaborator. # noqa: E501 + :param create_url: The create_url of this StartBlobUploadResponse. # noqa: E501 :type: str """ - if role is None: - raise ValueError("Invalid value for `role`, must not be `None`") # noqa: E501 - allowed_values = ["reader", "writer"] # noqa: E501 - if role not in allowed_values: - raise ValueError( - "Invalid value for `role` ({0}), must be one of {1}" # noqa: E501 - .format(role, allowed_values) - ) + if create_url is None: + raise ValueError("Invalid value for `create_url`, must not be `None`") # noqa: E501 - self._role = role + self._create_url = create_url def to_dict(self): """Returns the model properties as a dict""" @@ -156,7 +150,7 @@ def __repr__(self): def __eq__(self, other): """Returns true if both objects are equal""" - if not isinstance(other, Collaborator): + if not isinstance(other, StartBlobUploadResponse): return False return self.__dict__ == other.__dict__ diff --git a/src/kaggle/models/upload_file.py b/src/kaggle/models/upload_file.py new file mode 100644 index 0000000..98aa8ec --- /dev/null +++ b/src/kaggle/models/upload_file.py @@ -0,0 +1,172 @@ +#!/usr/bin/python +# +# Copyright 2024 Kaggle Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
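The hand-written StartBlobUploadRequest and StartBlobUploadResponse models above mirror the kagglesdk types that the extended API now uses for uploads (see the ApiStartBlobUploadRequest construction in _upload_blob). A condensed sketch of that flow under those assumptions; the file path is a placeholder:

    import os
    from kagglesdk.blobs.types.blob_api_service import (
        ApiBlobType, ApiStartBlobUploadRequest)

    path = '/tmp/example.csv'  # placeholder path
    request = ApiStartBlobUploadRequest()
    request.type = ApiBlobType.DATASET
    request.name = os.path.basename(path)
    request.content_length = os.path.getsize(path)
    request.last_modified_epoch_seconds = int(os.path.getmtime(path))
    # Inside `with self.build_kaggle_client() as kaggle:` the upload is started via
    #   response = kaggle.blobs.blob_api_client.start_blob_upload(request)
    # and the response carries the blob token and create URL (the same fields
    # exposed by StartBlobUploadResponse above) used to perform the actual upload.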
+ +# coding: utf-8 + +import pprint +import re # noqa: F401 + +import six + +from kaggle.models.dataset_column import DatasetColumn # noqa: F401,E501 + + +class UploadFile(object): + """ + Attributes: + column_types (dict): The key is attribute name + and the value is attribute type. + attribute_map (dict): The key is attribute name + and the value is json key in definition. + """ + column_types = { + 'token': 'str', + 'description': 'str', + 'columns': 'list[DatasetColumn]' + } + + attribute_map = { + 'token': 'token', + 'description': 'description', + 'columns': 'columns' + } + + def __init__(self, token=None, description=None, columns=None): # noqa: E501 + """UploadFile - a model defined in Swagger""" # noqa: E501 + + self._token = None + self._description = None + self._columns = None + self.discriminator = None + + if token is not None: + self.token = token + if description is not None: + self.description = description + if columns is not None: + self.columns = columns + + @property + def token(self): + """Gets the token of this UploadFile. # noqa: E501 + + A token referencing a specific file upload that can be used across requests # noqa: E501 + + :return: The token of this UploadFile. # noqa: E501 + :rtype: str + """ + return self._token + + @token.setter + def token(self, token): + """Sets the token of this UploadFile. + + A token referencing a specific file upload that can be used across requests # noqa: E501 + + :param token: The token of this UploadFile. # noqa: E501 + :type: str + """ + + self._token = token + + @property + def description(self): + """Gets the description of this UploadFile. # noqa: E501 + + The file description # noqa: E501 + + :return: The description of this UploadFile. # noqa: E501 + :rtype: str + """ + return self._description + + @description.setter + def description(self, description): + """Sets the description of this UploadFile. + + The file description # noqa: E501 + + :param description: The description of this UploadFile. # noqa: E501 + :type: str + """ + + self._description = description + + @property + def columns(self): + """Gets the columns of this UploadFile. # noqa: E501 + + A list of dataset column metadata # noqa: E501 + + :return: The columns of this UploadFile. # noqa: E501 + :rtype: list[DatasetColumn] + """ + return self._columns + + @columns.setter + def columns(self, columns): + """Sets the columns of this UploadFile. + + A list of dataset column metadata # noqa: E501 + + :param columns: The columns of this UploadFile. 
# noqa: E501 + :type: list[DatasetColumn] + """ + + self._columns = columns + + def to_dict(self): + """Returns the model properties as a dict""" + result = {} + + for attr, _ in six.iteritems(self.column_types): + value = getattr(self, attr) + if isinstance(value, list): + result[attr] = list(map( + lambda x: x.to_dict() if hasattr(x, "to_dict") else x, + value + )) + elif hasattr(value, "to_dict"): + result[attr] = value.to_dict() + elif isinstance(value, dict): + result[attr] = dict(map( + lambda item: (item[0], item[1].to_dict()) + if hasattr(item[1], "to_dict") else item, + value.items() + )) + else: + result[attr] = value + + return result + + def to_str(self): + """Returns the string representation of the model""" + return pprint.pformat(self.to_dict()) + + def __repr__(self): + """For `print` and `pprint`""" + return self.to_str() + + def __eq__(self, other): + """Returns true if both objects are equal""" + if not isinstance(other, UploadFile): + return False + + return self.__dict__ == other.__dict__ + + def __ne__(self, other): + """Returns true if both objects are not equal""" + return not self == other diff --git a/tests/unit_tests.py b/tests/unit_tests.py index 93ae068..6c87905 100644 --- a/tests/unit_tests.py +++ b/tests/unit_tests.py @@ -7,7 +7,7 @@ from requests import HTTPError -from kaggle.rest import ApiException +# from kaggle.rest import ApiException from kagglesdk.datasets.types.dataset_api_service import ApiDownloadDatasetRequest sys.path.insert(0, '..') @@ -15,7 +15,7 @@ sys.path.insert(0, '..') from kaggle import api - +ApiException = IOError # Unit test names include a letter to sort them in run order. # That seemed easier and more obvious than defining a test suite. @@ -40,7 +40,7 @@ instance_name = 'test' framework_name = 'jax' kernel_name = 'testing' -dataset_name = 'kaggleapitestdataset' +dataset_name = 'kaggleapi-testdataset' up_file = 'sample_submission.csv' description = 'House prices submission message' competition = 'house-prices-advanced-regression-techniques' @@ -206,6 +206,21 @@ class TestKaggleApi(unittest.TestCase): model_metadata_file = '' instance_metadata_file = '' + # Inbox + + def test_files_upload(self): + a = 2 # Change this value to run this test. + if a - 1 == 1: + return # Only run this test when needed because it uploads an inbox file. + filename = 'tmp_file.test' + with open(filename, 'w') as f: + f.write('test') + try: + api.files_upload_cli([filename], 'kaggle-api-test', False, False) + finally: + if os.path.exists('tmp_file.test'): + os.remove('tmp_file.test') + # Kernels def test_kernels_a_list(self): @@ -231,10 +246,12 @@ def test_kernels_c_push(self): self.assertIsNotNone(push_result.ref) self.assertTrue(isinstance(push_result.version_number, int)) self.kernel_slug = md['id'] + time.sleep(30) except ApiException as e: self.fail(f"kernels_push failed: {e}") def test_kernels_d_status(self): + # AssertionError: kernels_status failed: 404 Client Error: Not Found for url: http://localhost/api/v1/kernels/status?username=stevemessick&kernelslug=testing if self.kernel_slug == '': self.test_kernels_c_push() try: @@ -248,6 +265,8 @@ def test_kernels_d_status(self): time.sleep(5) status_result = api.kernels_status(self.kernel_slug) print(status_result.status) + if count >= max_status_tries: + self.fail(f"Could not get kernel status in allowed trys. 
Status: {status_result.status}") end_time = time.time() print(f'kernels_status ready in {end_time-start_time}s') except ApiException as e: @@ -264,6 +283,7 @@ def test_kernels_e_list_files(self): self.fail(f"kernels_list_files failed: {e}") def test_kernels_f_output(self): + # AssertionError: kernels_output failed: 404 Client Error: Not Found for url: http://localhost/api/v1/kernels/output?username=stevemessick&kernelslug=testing fs = [] if self.kernel_slug == '': self.test_kernels_c_push() @@ -283,7 +303,7 @@ def test_kernels_f_output(self): def test_kernels_g_pull(self): if self.kernel_metadata_path == '': - self.test_kernels_b_initialize() + self.test_kernels_c_push() fs = '' try: fs = api.kernels_pull(f'{test_user}/testing', 'kernel/tmp', metadata=True) @@ -371,6 +391,8 @@ def test_competition_e_download_file(self): api.competition_download_file( competition, self.competition_file.ref, force=True) self.assertTrue(os.path.exists(self.competition_file.ref)) + api.competition_download_file( + competition, self.competition_file.ref, force=False) except ApiException as e: self.fail(f"competition_download_file failed: {e}") finally: @@ -454,10 +476,10 @@ def test_dataset_d_list_files(self): if self.dataset == '': self.test_dataset_a_list() try: - dataset_files = api.dataset_list_files(self.dataset) - self.assertIsInstance(dataset_files.files, list) - self.assertGreater(len(dataset_files.files), 0) - self.dataset_file = dataset_files.files[0] + response = api.dataset_list_files(self.dataset) + self.assertIsInstance(response.dataset_files, list) + self.assertGreater(len(response.dataset_files), 0) + self.dataset_file = response.dataset_files[0] [ self.assertTrue(hasattr(self.dataset_file, api.camel_to_snake(f))) for f in api.dataset_file_fields @@ -529,9 +551,7 @@ def test_dataset_i_create_new(self): def test_dataset_j_create_version(self): if not os.path.exists( os.path.join(dataset_directory, api.DATASET_METADATA_FILE)): - self.test_dataset_h_initialize() - update_dataset_metadata_file(self.meta_file, dataset_name, - self.version_number) + self.test_dataset_i_create_new() try: new_version = api.dataset_create_version(dataset_directory, "Notes") self.assertIsNotNone(new_version) @@ -672,7 +692,7 @@ def test_model_instance_d_files(self): self.assertIsInstance(inst_files_resp.files, list) self.assertGreater(len(inst_files_resp.files), 0) [self.assertTrue(hasattr(inst_files_resp.files[0], api.camel_to_snake(f))) - for f in api.dataset_file_fields] + for f in api.model_file_fields] except ApiException as e: self.fail(f"model_instance_files failed: {e}") @@ -763,7 +783,9 @@ def test_model_instance_x_delete(self): try: inst_update_resp = api.model_instance_delete(self.model_instance, True) self.assertIsNotNone(inst_update_resp) - self.assertEqual(len(inst_update_resp.error), 0) + if len(inst_update_resp.error): + print(inst_update_resp.error) + self.assertEquals(len(inst_update_resp.error), 0) except ApiException as e: self.fail(f"model_instance_delete failed: {e}") diff --git a/tools/GeneratePythonLibrary.sh b/tools/GeneratePythonLibrary.sh index d0785ba..6942a7f 100755 --- a/tools/GeneratePythonLibrary.sh +++ b/tools/GeneratePythonLibrary.sh @@ -109,9 +109,8 @@ function create-local-creds { chmod 600 $kaggle_config_file } -function generate-from-swagger { - java -jar ./tools/swagger-codegen-cli.jar generate -i $SWAGGER_YAML -c $SWAGGER_CONFIG -l python || true - +function generate-package { + # TODO Remove this. 
if [[ -f "kaggle/api/__init__.py" ]]; then sed -i -e 's/kaggle_api/kaggle_api_extended/g' kaggle/api/__init__.py fi @@ -133,20 +132,12 @@ def _get_endpoint_from_env(): return endpoint + "/api/v1" END fi - - if [[ -f "kaggle/rest.py" ]]; then - sed -i -e "/if 'Content-Type' not in headers:/,+2d" kaggle/rest.py - fi - - if [[ -f "kaggle/api_client.py" ]]; then - sed -i -e "/def __del__/,+3d" kaggle/api_client.py - fi } function copy-src { cp ./src/setup.py . cp ./src/setup.cfg . - cp -r ./src/kaggle/* ./kaggle/ + cp -r ./src/kaggle . } function run-autogen { @@ -184,7 +175,6 @@ function install-package { function cleanup { cd $SELF_DIR rm -rf tox.ini \ - requirements.txt \ test-requirements.txt \ test \ .swagger-codegen \ @@ -204,7 +194,7 @@ function cleanup { function run { reset - generate-from-swagger + generate-package copy-src run-autogen install-package @@ -222,7 +212,7 @@ function watch-swagger { while inotifywait -q -r $WATCHED_EVENTS --format "%e %w%f" $watched_paths; do echo "Deleting $SELF_DIR/kaggle/ $SELF_DIR/kaggle/" rm -rf $SELF_DIR/kaggle/* - generate-from-swagger + generate-package run-autogen copy-src echo -e "\nWatching for changes to Swagger config..." @@ -235,7 +225,7 @@ function watch-src { echo "Watching for changes under \"src\"..." while inotifywait -q -r $WATCHED_EVENTS --format "%e %w%f" $watched_paths; do # Do not delete the output directory when there is no Swagger change to avoid - # having to run generate-from-swagger for each small code change as Swagger code + # having to run generate-package for each small code change as Swagger code # generation is a bit slow (can take 2-3 seconds). echo "Copying changes..." copy-src
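With the swagger generator gone, copy-src is what places the hand-maintained models (dataset_column.py, upload_file.py, and the blob upload models above) onto the package path. A short sketch of how the two dataset-upload models compose, using placeholder values; the blob token would normally come from a start_blob_upload response:

    from kaggle.models.dataset_column import DatasetColumn
    from kaggle.models.upload_file import UploadFile

    # Hypothetical column metadata for a CSV attached to a dataset version.
    column = DatasetColumn(order=0, name='id', type='numeric',
                           original_type='integer', description='Row identifier')
    upload = UploadFile(token='<blob token>',  # placeholder token
                        description='sample file', columns=[column])
    # to_dict() recursively serializes nested models into the API payload shape.
    print(upload.to_dict())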