├── .gcloudignore
├── .github
└── workflows
│ └── no-response.yaml
├── .gitignore
├── CHANGELOG.md
├── CONTRIBUTING.md
├── LICENSE.txt
├── MANIFEST.in
├── README.md
├── SECURITY.md
├── docs
├── ApiBlobType.md
├── Collaborator.md
├── CreateInboxFileRequest.md
├── DatasetColumn.md
├── DatasetNewRequest.md
├── DatasetNewVersionRequest.md
├── DatasetUpdateSettingsRequest.md
├── Error.md
├── KaggleApi.md
├── KernelPushRequest.md
├── License.md
├── ModelInstanceNewVersionRequest.md
├── ModelInstanceUpdateRequest.md
├── ModelNewInstanceRequest.md
├── ModelNewRequest.md
├── ModelUpdateRequest.md
├── README.md
├── Result.md
├── StartBlobUploadRequest.md
├── StartBlobUploadResponse.md
├── UploadFile.md
├── conf.py
├── datasets_metadata.md
├── index.rst
├── kernels_metadata.md
├── model_card.md
├── modelinstance_usage.md
└── models_metadata.md
├── documentation
├── competitions.md
├── configuration.md
├── datasets.md
├── index.md
├── kernels.md
├── model_instance_versions.md
├── model_instances.md
├── models.md
└── tutorials.md
├── integration_tests
└── test_models.py
├── pyproject.toml
├── requirements.in
├── requirements.txt
├── src
├── kaggle
│ ├── LICENSE
│ ├── __init__.py
│ ├── api
│ │ ├── __init__.py
│ │ ├── kaggle_api.py
│ │ └── kaggle_api_extended.py
│ ├── cli.py
│ ├── configuration.py
│ ├── models
│ │ ├── __init__.py
│ │ ├── api_blob_type.py
│ │ ├── dataset_column.py
│ │ ├── dataset_new_request.py
│ │ ├── dataset_new_version_request.py
│ │ ├── dataset_update_settings_request.py
│ │ ├── kaggle_models_extended.py
│ │ ├── kernel_push_request.py
│ │ ├── model_instance_new_version_request.py
│ │ ├── model_instance_update_request.py
│ │ ├── model_new_instance_request.py
│ │ ├── model_new_request.py
│ │ ├── model_update_request.py
│ │ ├── start_blob_upload_request.py
│ │ ├── start_blob_upload_response.py
│ │ └── upload_file.py
│ └── test
│ │ ├── __init__.py
│ │ └── test_authenticate.py
└── kagglesdk
│ ├── LICENSE
│ ├── __init__.py
│ ├── admin
│ ├── __init__.py
│ ├── services
│ │ ├── __init__.py
│ │ └── inbox_file_service.py
│ └── types
│ │ ├── __init__.py
│ │ └── inbox_file_service.py
│ ├── blobs
│ ├── __init__.py
│ ├── services
│ │ ├── __init__.py
│ │ └── blob_api_service.py
│ └── types
│ │ ├── __init__.py
│ │ └── blob_api_service.py
│ ├── common
│ ├── __init__.py
│ └── types
│ │ ├── __init__.py
│ │ ├── file_download.py
│ │ └── http_redirect.py
│ ├── competitions
│ ├── __init__.py
│ ├── services
│ │ ├── __init__.py
│ │ └── competition_api_service.py
│ └── types
│ │ ├── __init__.py
│ │ ├── competition_api_service.py
│ │ ├── competition_enums.py
│ │ └── submission_status.py
│ ├── datasets
│ ├── __init__.py
│ ├── services
│ │ ├── __init__.py
│ │ └── dataset_api_service.py
│ └── types
│ │ ├── __init__.py
│ │ ├── dataset_api_service.py
│ │ ├── dataset_enums.py
│ │ └── dataset_types.py
│ ├── education
│ ├── __init__.py
│ ├── services
│ │ ├── __init__.py
│ │ └── education_api_service.py
│ └── types
│ │ ├── __init__.py
│ │ ├── education_api_service.py
│ │ └── education_service.py
│ ├── kaggle_client.py
│ ├── kaggle_env.py
│ ├── kaggle_http_client.py
│ ├── kaggle_object.py
│ ├── kernels
│ ├── __init__.py
│ ├── services
│ │ ├── __init__.py
│ │ └── kernels_api_service.py
│ └── types
│ │ ├── __init__.py
│ │ ├── kernels_api_service.py
│ │ └── kernels_enums.py
│ ├── models
│ ├── __init__.py
│ ├── services
│ │ ├── __init__.py
│ │ ├── model_api_service.py
│ │ └── model_service.py
│ └── types
│ │ ├── __init__.py
│ │ ├── model_api_service.py
│ │ ├── model_enums.py
│ │ ├── model_service.py
│ │ └── model_types.py
│ ├── security
│ ├── __init__.py
│ ├── services
│ │ ├── __init__.py
│ │ └── oauth_service.py
│ └── types
│ │ ├── __init__.py
│ │ ├── authentication.py
│ │ └── oauth_service.py
│ ├── test
│ └── test_client.py
│ └── users
│ ├── __init__.py
│ ├── services
│ ├── __init__.py
│ └── account_service.py
│ └── types
│ ├── __init__.py
│ ├── account_service.py
│ └── users_enums.py
├── tests
├── dataset
│ └── data.csv
├── kernel
│ └── testing-x.ipynb
├── model
│ └── instance
│ │ ├── data.csv
│ │ └── version
│ │ └── metadata.json
├── sample_submission.csv
├── test_commands.sh
└── unit_tests.py
└── tools
├── GeneratePythonLibrary.sh
├── cicd
└── integration-tests.yaml
├── releases
├── Dockerfile
├── cloudbuild.yaml
├── requirements.in
└── requirements.txt
├── use-localhost.sh
└── use-prod.sh
/.gcloudignore:
--------------------------------------------------------------------------------
1 | python
--------------------------------------------------------------------------------
/.github/workflows/no-response.yaml:
--------------------------------------------------------------------------------
1 | name: No Response
2 |
3 | # Both `issue_comment` and `scheduled` event types are required for this Action
4 | # to work properly.
5 | on:
6 | issue_comment:
7 | types: [created]
8 | schedule:
9 | # Schedule for five minutes after midnight, every day
10 | - cron: '5 0 * * *'
11 |
12 | # By specifying the access of one of the scopes, all of those that are not
13 | # specified are set to 'none'.
14 | permissions:
15 | issues: write
16 |
17 | jobs:
18 | noResponse:
19 | runs-on: ubuntu-latest
20 | steps:
21 | - uses: lee-dohm/no-response@9bb0a4b5e6a45046f00353d5de7d90fb8bd773bb
22 | with:
23 | token: ${{ github.token }}
24 | # Comment to post when closing an Issue for lack of response. Set to `false` to disable
25 | closeComment: >
26 | Without additional information we're not able to resolve this issue,
27 | so it will be closed at this time. You're still free to add more info
28 | and respond to any questions above, though. We'll reopen the case
29 | if you do. Thanks for your contribution!
30 | # Number of days of inactivity before an issue is closed for lack of response.
31 | daysUntilClose: 14
32 | # Label requiring a response.
33 | responseRequiredLabel: "waiting for response"
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | kaggle/
19 | kagglesdk/
20 | !src/kaggle/
21 | !src/kagglesdk/
22 | lib/
23 | lib64/
24 | parts/
25 | sdist/
26 | var/
27 | *.egg-info/
28 | .installed.cfg
29 | *.egg
30 |
31 | # PyInstaller
32 | # Usually these files are written by a python script from a template
33 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
34 | *.manifest
35 | *.spec
36 |
37 | # Installer logs
38 | pip-log.txt
39 | pip-delete-this-directory.txt
40 |
41 | # Unit test / coverage reports
42 | htmlcov/
43 | .tox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | .hypothesis/
51 | venv/
52 | .python-version
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 |
61 | # Sphinx documentation
62 | docs/_build/
63 |
64 | # PyBuilder
65 | target/
66 |
67 | #Ipython Notebook
68 | .ipynb_checkpoints
69 |
70 | # Rider/IntelliJ
71 | .idea/
72 |
73 | # Gemini
74 | GEMINI.md
75 | .gemini
76 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # How to Contribute
2 |
3 | We'd love to accept your patches and contributions to this project. There are
4 | just a few small guidelines you need to follow.
5 |
6 | ## Contributor License Agreement
7 |
8 | Contributions to this project must be accompanied by a Contributor License
9 | Agreement. You (or your employer) retain the copyright to your contribution;
10 | this simply gives us permission to use and redistribute your contributions as
11 | part of the project. Head over to <https://cla.developers.google.com/> to see
12 | your current agreements on file or to sign a new one.
13 |
14 | You generally only need to submit a CLA once, so if you've already submitted one
15 | (even if it was for a different project), you probably don't need to do it
16 | again.
17 |
18 | ## Code reviews
19 |
20 | All submissions, including submissions by project members, require review. We
21 | use GitHub pull requests for this purpose. Consult
22 | [GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
23 | information on using pull requests.
24 |
25 | ## Community Guidelines
26 |
27 | This project follows [Google's Open Source Community
28 | Guidelines](https://opensource.google.com/conduct/).
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include LICENSE.txt
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Kaggle API
2 |
3 | Official API for https://www.kaggle.com, accessible using a command line tool implemented in Python 3.
4 |
5 | [User documentation](docs/README.md)
6 |
7 | ## Installation
8 |
9 | Ensure you have Python 3 and the package manager `pip` installed.
10 |
11 | Run the following command to access the Kaggle API using the command line:
12 |
13 | ```sh
14 | pip install kaggle
15 | ```
16 |
17 | ## Development
18 |
19 | ### Kaggle Internal
20 |
21 | Obviously, this depends on Kaggle services. When you're extending the API and modifying
22 | or adding to those services, you should be working in your Kaggle mid-tier development
23 | environment. You'll run Kaggle locally, in the container, and test the Python code by
24 | running it in the container so it can connect to your local testing environment.
25 |
26 | Also, run the following command to get `autogen.sh` installed:
27 | ```bash
28 | rm -rf /tmp/autogen && mkdir -p /tmp/autogen && unzip -qo /tmp/autogen.zip -d /tmp/autogen &&
29 | mv /tmp/autogen/autogen-*/* /tmp/autogen && rm -rf /tmp/autogen/autogen-* &&
30 | sudo chmod a+rx /tmp/autogen/autogen.sh
31 | ```
32 |
33 | ### Prerequisites
34 |
35 | We use [hatch](https://hatch.pypa.io) to manage this project.
36 |
37 | Follow these [instructions](https://hatch.pypa.io/latest/install/) to install it.
38 |
39 | If you are working in a managed environment, you may want to use `pipx`. If it isn't already installed
40 | try `sudo apt install pipx`. Then you should be able to proceed with `pipx install hatch`.
41 |
42 | ### Dependencies
43 |
44 | ```sh
45 | hatch run install-deps
46 | ```
47 |
48 | ### Compile
49 |
50 | ```sh
51 | hatch run compile
52 | ```
53 |
54 | The compiled files are generated in the `kaggle/` directory from the `src/` directory.
55 |
56 | All the changes must be done in the `src/` directory.
57 |
58 | ### Run
59 |
60 | Use `hatch run install` to compile the program and install it in the default `hatch` environment.
61 | To run that version locally for testing, use hatch: `hatch run kaggle -v`. If you'd rather not
62 | type `hatch run` every time, launch a new shell in the hatch environment: `hatch shell`.
63 |
64 | You can also run the code in python directly:
65 |
66 | ```sh
67 | hatch run python
68 | ```
69 |
70 | ```python
71 | import kaggle
72 | from kaggle.api.kaggle_api_extended import KaggleApi
73 | api = KaggleApi()
74 | api.authenticate()
75 | api.model_list_cli()
76 |
77 | Next Page Token = [...]
78 | [...]
79 |
80 | ```
81 |
82 | Or in a single command:
83 |
84 | ```sh
85 | hatch run python -c "import kaggle; from kaggle.api.kaggle_api_extended import KaggleApi; api = KaggleApi(); api.authenticate(); api.model_list_cli()"
86 | ```
87 |
88 | ### Example
89 |
90 | Let's change the `model_list_cli` method in the source file:
91 |
92 | ```sh
93 | ❯ git diff src/kaggle/api/kaggle_api_extended.py
94 | [...]
95 | + print('hello Kaggle CLI update')^M
96 | models = self.model_list(sort_by, search, owner, page_size, page_token)
97 | [...]
98 |
99 | ❯ hatch run compile
100 | [...]
101 |
102 | ❯ hatch run python -c "import kaggle; from kaggle.api.kaggle_api_extended import KaggleApi; api = KaggleApi(); api.authenticate(); api.model_list_cli()"
103 | hello Kaggle CLI update
104 | Next Page Token = [...]
105 | ```
106 |
107 | ### Integration Tests
108 |
109 | To run integration tests on your local machine, you need to set up your Kaggle API credentials. You can do this in one of the two ways described in [this doc](docs/README.md). Refer to the sections:
110 | - Using environment variables
111 | - Using credentials file
112 |
113 | After setting up your credentials by any of these methods, you can run the integration tests as follows:
114 |
115 | ```sh
116 | # Run all tests
117 | hatch run integration-test
118 | ```
119 |
120 | ## License
121 |
122 | The Kaggle API is released under the [Apache 2.0 license](LICENSE.txt).
123 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security Policy
2 |
3 | ## Supported Versions
4 |
5 | Security updates are applied only to the latest release.
6 |
7 | ## Reporting a Vulnerability
8 |
9 | If you have discovered a security vulnerability in this project, please report it privately. **Do not disclose it as a public issue.** This gives us time to work with you to fix the issue before public exposure, reducing the chance that the exploit will be used before a patch is released.
10 |
11 | Please disclose it at [security advisory](https://github.com/Kaggle/kaggle-api/security/advisories/new).
12 |
13 | The vulnerabilities will be addressed as soon as possible, with a maximum of 90 days before a public exposure.
14 |
--------------------------------------------------------------------------------
/docs/ApiBlobType.md:
--------------------------------------------------------------------------------
1 | # ApiBlobType
2 |
3 | ## Properties
4 | Name | Type | Description | Notes
5 | ------------ | ------------- | ------------- | -------------
6 |
7 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
8 |
9 |
10 |
--------------------------------------------------------------------------------
/docs/Collaborator.md:
--------------------------------------------------------------------------------
1 | # Collaborator
2 |
3 | ## Properties
4 | Name | Type | Description | Notes
5 | ------------ | ------------- | ------------- | -------------
6 | **username** | **str** | Username of the collaborator |
7 | **role** | **str** | Role of the collaborator |
8 |
9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
10 |
11 |
12 |
--------------------------------------------------------------------------------
/docs/CreateInboxFileRequest.md:
--------------------------------------------------------------------------------
1 | # CreateInboxFileRequest
2 |
3 | ## Properties
4 | Name | Type | Description | Notes
5 | ------------ | ------------- | ------------- | -------------
6 | **virtual_directory** | **str** | Directory name used for tagging the uploaded file |
7 | **blob_file_token** | **str** | Token representing the uploaded file |
8 |
9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
10 |
11 |
12 |
--------------------------------------------------------------------------------
/docs/DatasetColumn.md:
--------------------------------------------------------------------------------
1 | # DatasetColumn
2 |
3 | ## Properties
4 | Name | Type | Description | Notes
5 | ------------ | ------------- | ------------- | -------------
6 | **order** | **float** | The order that the column comes in, 0-based. (The first column is 0, second is 1, etc.) | [optional]
7 | **name** | **str** | The column name | [optional]
8 | **type** | **str** | The type of all of the fields in the column. Please see the data types on https://github.com/Kaggle/kaggle-api/wiki/Dataset-Metadata | [optional]
9 | **original_type** | **str** | Used to store the original type of the column, which will be converted to Kaggle's types. For example, an `originalType` of `\"integer\"` would convert to a `type` of `\"numeric\"` | [optional]
10 | **description** | **str** | The description of the column | [optional]
11 |
12 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
13 |
14 |
15 |
--------------------------------------------------------------------------------
/docs/DatasetNewRequest.md:
--------------------------------------------------------------------------------
1 | # DatasetNewRequest
2 |
3 | ## Properties
4 | Name | Type | Description | Notes
5 | ------------ | ------------- | ------------- | -------------
6 | **title** | **str** | The title of the new dataset |
7 | **slug** | **str** | The slug that the dataset should be created with | [optional]
8 | **owner_slug** | **str** | The owner's username | [optional]
9 | **license_name** | **str** | The license that should be associated with the dataset | [optional] [default to 'unknown']
10 | **subtitle** | **str** | The subtitle to be set on the dataset | [optional]
11 | **description** | **str** | The description to be set on the dataset | [optional] [default to '']
12 | **files** | [**list[UploadFile]**](UploadFile.md) | A list of files that should be associated with the dataset |
13 | **is_private** | **bool** | Whether or not the dataset should be private | [optional] [default to True]
14 | **convert_to_csv** | **bool** | Whether or not a tabular dataset should be converted to csv | [optional] [default to True]
15 | **category_ids** | **list[str]** | A list of tag IDs to associate with the dataset | [optional]
16 |
17 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
18 |
19 |
20 |
--------------------------------------------------------------------------------
/docs/DatasetNewVersionRequest.md:
--------------------------------------------------------------------------------
1 | # DatasetNewVersionRequest
2 |
3 | ## Properties
4 | Name | Type | Description | Notes
5 | ------------ | ------------- | ------------- | -------------
6 | **version_notes** | **str** | The version notes for the new dataset version |
7 | **subtitle** | **str** | The subtitle to set on the dataset | [optional]
8 | **description** | **str** | The description to set on the dataset | [optional]
9 | **files** | [**list[UploadFile]**](UploadFile.md) | A list of files that should be associated with the dataset |
10 | **convert_to_csv** | **bool** | Whether or not a tabular dataset should be converted to csv | [optional] [default to True]
11 | **category_ids** | **list[str]** | A list of tag IDs to associate with the dataset | [optional]
12 | **delete_old_versions** | **bool** | Whether or not all previous versions of the dataset should be deleted upon creating the new version | [optional] [default to False]
13 |
14 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
15 |
16 |
17 |
--------------------------------------------------------------------------------
/docs/DatasetUpdateSettingsRequest.md:
--------------------------------------------------------------------------------
1 | # DatasetUpdateSettingsRequest
2 |
3 | ## Properties
4 | Name | Type | Description | Notes
5 | ------------ | ------------- | ------------- | -------------
6 | **title** | **str** | Title of the dataset | [optional]
7 | **subtitle** | **str** | Subtitle of the dataset | [optional]
8 | **description** | **str** | Description of the dataset | [optional]
9 | **is_private** | **bool** | Whether or not the dataset should be private | [optional]
10 | **licenses** | **list[object]** | A list of licenses that apply to this dataset | [optional]
11 | **keywords** | **list[str]** | A list of keywords that apply to this dataset | [optional]
12 | **collaborators** | **list[object]** | A list of collaborators that may read or edit this dataset | [optional]
13 | **data** | **list[object]** | A list containing metadata for each file in the dataset | [optional]
14 |
15 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
16 |
17 |
18 |
--------------------------------------------------------------------------------
/docs/Error.md:
--------------------------------------------------------------------------------
1 | # Error
2 |
3 | ## Properties
4 | Name | Type | Description | Notes
5 | ------------ | ------------- | ------------- | -------------
6 | **code** | **int** | The server error code returned | [optional]
7 | **message** | **str** | The error message generated by the server | [optional]
8 |
9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
10 |
11 |
12 |
--------------------------------------------------------------------------------
/docs/KernelPushRequest.md:
--------------------------------------------------------------------------------
1 | # KernelPushRequest
2 |
3 | ## Properties
4 | Name | Type | Description | Notes
5 | ------------ | ------------- | ------------- | -------------
6 | **id** | **int** | The kernel's ID number. One of `id` and `slug` are required. If both are specified, `id` will be preferred | [optional]
7 | **slug** | **str** | The full slug of the kernel to push to, in the format `USERNAME/KERNEL-SLUG`. The kernel slug must be the title lowercased with dashes (`-`) replacing spaces. One of `id` and `slug` are required. If both are specified, `id` will be preferred | [optional]
8 | **new_title** | **str** | The title to be set on the kernel | [optional]
9 | **text** | **str** | The kernel's source code |
10 | **language** | **str** | The language that the kernel is written in |
11 | **kernel_type** | **str** | The type of kernel. Cannot be changed once the kernel has been created |
12 | **is_private** | **bool** | Whether or not the kernel should be private | [optional]
13 | **enable_gpu** | **bool** | Whether or not the kernel should run on a GPU | [optional]
14 | **enable_tpu** | **bool** | Whether or not the kernel should run on a TPU | [optional]
15 | **enable_internet** | **bool** | Whether or not the kernel should be able to access the internet | [optional]
16 | **dataset_data_sources** | **list[str]** | A list of dataset data sources that the kernel should use. Each dataset is specified as `USERNAME/DATASET-SLUG` | [optional]
17 | **competition_data_sources** | **list[str]** | A list of competition data sources that the kernel should use | [optional]
18 | **kernel_data_sources** | **list[str]** | A list of kernel data sources that the kernel should use. Each dataset is specified as `USERNAME/KERNEL-SLUG` | [optional]
19 | **model_data_sources** | **list[str]** | A list of model data sources that the kernel should use. Each model is specified as `USERNAME/MODEL-SLUG/FRAMEWORK/VARIATION-SLUG/VERSION-NUMBER` | [optional]
20 | **category_ids** | **list[str]** | A list of tag IDs to associate with the kernel | [optional]
21 | **docker_image_pinning_type** | **str** | Which docker image to use for executing new versions going forward. | [optional]
22 |
23 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
24 |
25 |
26 |
--------------------------------------------------------------------------------
/docs/License.md:
--------------------------------------------------------------------------------
1 | # License
2 |
3 | ## Properties
4 | Name | Type | Description | Notes
5 | ------------ | ------------- | ------------- | -------------
6 | **name** | **str** | Name of the license |
7 |
8 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
9 |
10 |
11 |
--------------------------------------------------------------------------------
/docs/ModelInstanceNewVersionRequest.md:
--------------------------------------------------------------------------------
1 | # ModelInstanceNewVersionRequest
2 |
3 | ## Properties
4 | Name | Type | Description | Notes
5 | ------------ | ------------- | ------------- | -------------
6 | **version_notes** | **str** | The version notes for the model instance version | [optional]
7 | **files** | [**list[UploadFile]**](UploadFile.md) | A list of files that should be associated with the model instance version |
8 |
9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
10 |
11 |
12 |
--------------------------------------------------------------------------------
/docs/ModelInstanceUpdateRequest.md:
--------------------------------------------------------------------------------
1 | # ModelInstanceUpdateRequest
2 |
3 | ## Properties
4 | Name | Type | Description | Notes
5 | ------------ | ------------- | ------------- | -------------
6 | **overview** | **str** | The overview of the model instance (markdown) | [optional]
7 | **usage** | **str** | The description of how to use the model instance (markdown) | [optional]
8 | **license_name** | **str** | The license that should be associated with the model instance | [optional] [default to 'Apache 2.0']
9 | **fine_tunable** | **bool** | Whether the model instance is fine tunable | [optional] [default to True]
10 | **training_data** | **list[str]** | A list of training data (urls or names) | [optional]
11 | **model_instance_type** | **str** | Whether the model instance is a base model, external variant, internal variant, or unspecified | [optional]
12 | **base_model_instance** | **str** | If this is an internal variant, the `{owner-slug}/{model-slug}/{framework}/{instance-slug}` of the base model instance | [optional]
13 | **external_base_model_url** | **int** | If this is an external variant, a URL to the base model | [optional]
14 | **update_mask** | **str** | Describes which fields to update |
15 |
16 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
17 |
18 |
19 |
--------------------------------------------------------------------------------
/docs/ModelNewInstanceRequest.md:
--------------------------------------------------------------------------------
1 | # ModelNewInstanceRequest
2 |
3 | ## Properties
4 | Name | Type | Description | Notes
5 | ------------ | ------------- | ------------- | -------------
6 | **instance_slug** | **str** | The slug that the model instance should be created with |
7 | **framework** | **str** | The framework of the model instance |
8 | **overview** | **str** | The overview of the model instance (markdown) | [optional]
9 | **usage** | **str** | The description of how to use the model instance (markdown) | [optional]
10 | **license_name** | **str** | The license that should be associated with the model instance | [default to 'Apache 2.0']
11 | **fine_tunable** | **bool** | Whether the model instance is fine tunable | [optional] [default to True]
12 | **training_data** | **list[str]** | A list of training data (urls or names) | [optional]
13 | **model_instance_type** | **str** | Whether the model instance is a base model, external variant, internal variant, or unspecified | [optional]
14 | **base_model_instance** | **str** | If this is an internal variant, the `{owner-slug}/{model-slug}/{framework}/{instance-slug}` of the base model instance | [optional]
15 | **external_base_model_url** | **int** | If this is an external variant, a URL to the base model | [optional]
16 | **files** | [**list[UploadFile]**](UploadFile.md) | A list of files that should be associated with the model instance version | [optional]
17 |
18 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
19 |
20 |
21 |
--------------------------------------------------------------------------------
/docs/ModelNewRequest.md:
--------------------------------------------------------------------------------
1 | # ModelNewRequest
2 |
3 | ## Properties
4 | Name | Type | Description | Notes
5 | ------------ | ------------- | ------------- | -------------
6 | **owner_slug** | **str** | The owner's slug |
7 | **slug** | **str** | The slug that the model should be created with |
8 | **title** | **str** | The title of the new model |
9 | **subtitle** | **str** | The subtitle of the new model | [optional]
10 | **is_private** | **bool** | Whether or not the model should be private | [default to True]
11 | **description** | **str** | The description to be set on the model | [optional] [default to '']
12 | **publish_time** | **date** | When the model was initially published | [optional]
13 | **provenance_sources** | **str** | The provenance sources to be set on the model | [optional] [default to '']
14 |
15 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
16 |
17 |
18 |
--------------------------------------------------------------------------------
/docs/ModelUpdateRequest.md:
--------------------------------------------------------------------------------
1 | # ModelUpdateRequest
2 |
3 | ## Properties
4 | Name | Type | Description | Notes
5 | ------------ | ------------- | ------------- | -------------
6 | **title** | **str** | The title of the new model | [optional]
7 | **subtitle** | **str** | The subtitle of the new model | [optional]
8 | **is_private** | **bool** | Whether or not the model should be private | [optional] [default to True]
9 | **description** | **str** | The description to be set on the model | [optional] [default to '']
10 | **publish_time** | **date** | When the model was initially published | [optional]
11 | **provenance_sources** | **str** | The provenance sources to be set on the model | [optional] [default to '']
12 | **update_mask** | **str** | Describes which fields to update | [optional]
13 |
14 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
15 |
16 |
17 |
--------------------------------------------------------------------------------
/docs/Result.md:
--------------------------------------------------------------------------------
1 | # Result
2 |
3 | ## Properties
4 | Name | Type | Description | Notes
5 | ------------ | ------------- | ------------- | -------------
6 |
7 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
8 |
9 |
10 |
--------------------------------------------------------------------------------
/docs/StartBlobUploadRequest.md:
--------------------------------------------------------------------------------
1 | # StartBlobUploadRequest
2 |
3 | ## Properties
4 | Name | Type | Description | Notes
5 | ------------ | ------------- | ------------- | -------------
6 | **type** | **object** | The type of the blob (one of \"dataset\", \"model\", \"inbox\") | [optional]
7 | **name** | **str** | Name of the file |
8 | **content_length** | **int** | Content length of the file in bytes |
9 | **content_type** | **str** | Content/MIME type (e.g. \"text/plain\") of the file | [optional]
10 | **last_modified_epoch_seconds** | **int** | Last modified date of file in seconds since epoch in UTC | [optional]
11 |
12 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
13 |
14 |
15 |
--------------------------------------------------------------------------------
/docs/StartBlobUploadResponse.md:
--------------------------------------------------------------------------------
1 | # StartBlobUploadResponse
2 |
3 | ## Properties
4 | Name | Type | Description | Notes
5 | ------------ | ------------- | ------------- | -------------
6 | **token** | **str** | Opaque string token used to reference the new blob/file. |
7 | **create_url** | **str** | URL to use to start the upload. |
8 |
9 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
10 |
11 |
12 |
--------------------------------------------------------------------------------
/docs/UploadFile.md:
--------------------------------------------------------------------------------
1 | # UploadFile
2 |
3 | ## Properties
4 | Name | Type | Description | Notes
5 | ------------ | ------------- | ------------- | -------------
6 | **token** | **str** | A token referencing a specific file upload that can be used across requests | [optional]
7 | **description** | **str** | The file description | [optional]
8 | **columns** | [**list[DatasetColumn]**](DatasetColumn.md) | A list of dataset column metadata | [optional]
9 |
10 | [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
11 |
12 |
13 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # Copyright 2024 Kaggle Inc
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 |
17 | # Configuration file for the Sphinx documentation builder.
18 | #
19 | # This file only contains a selection of the most common options. For a full
20 | # list see the documentation:
21 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
22 |
23 | # -- Path setup --------------------------------------------------------------
24 |
25 | # If extensions (or modules to document with autodoc) are in another directory,
26 | # add these directories to sys.path here. If the directory is relative to the
27 | # documentation root, use os.path.abspath to make it absolute, like shown here.
28 |
29 | import os
30 | import sys
31 |
32 | sys.path.insert(0, os.path.abspath('.'))
33 |
34 | # -- Project information -----------------------------------------------------
35 |
36 | project = 'kaggle'
37 | copyright = '2024, kaggle'
38 | author = 'kaggle'
39 |
40 | # -- General configuration ---------------------------------------------------
41 |
42 | # Add any Sphinx extension module names here, as strings. They can be
43 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
44 | # ones.
45 | extensions = ['sphinxarg.ext']
46 |
47 | # Add any paths that contain templates here, relative to this directory.
48 | templates_path = ['_templates']
49 |
50 | # List of patterns, relative to source directory, that match files and
51 | # directories to ignore when looking for source files.
52 | # This pattern also affects html_static_path and html_extra_path.
53 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
54 |
55 | # -- Options for HTML output -------------------------------------------------
56 |
57 | # The theme to use for HTML and HTML Help pages. See the documentation for
58 | # a list of builtin themes.
59 | #
60 | html_theme = 'alabaster'
61 |
62 | # Add any paths that contain custom static files (such as style sheets) here,
63 | # relative to this directory. They are copied after the builtin static files,
64 | # so a file named "default.css" will overwrite the builtin "default.css".
65 | html_static_path = ['_static']
66 |
--------------------------------------------------------------------------------
/docs/datasets_metadata.md:
--------------------------------------------------------------------------------
1 | The Kaggle API follows the [Data Package specification](https://frictionlessdata.io/specs/data-package/) for specifying metadata when creating new Datasets and Dataset versions. Next to your files, you have to put a special `dataset-metadata.json` file in your upload folder alongside the files for each new Dataset (version).
2 |
3 | Here's a basic example for `dataset-metadata.json`:
4 | ```
5 | {
6 | "title": "My Awesome Dataset",
7 | "id": "timoboz/my-awesome-dataset",
8 | "licenses": [{"name": "CC0-1.0"}]
9 | }
10 | ```
11 | You can also use the API command `kaggle datasets init -p /path/to/dataset` to have the API create this file for you.
12 |
13 | Here's an example containing file metadata:
14 | ```
15 | {
16 | "title": "My Awesome Dataset",
17 | "subtitle": "My awesomer subtitle",
18 | "description": "My awesomest description",
19 | "id": "timoboz/my-awesome-dataset",
20 | "id_no": 12345,
21 | "licenses": [{"name": "CC0-1.0"}],
22 | "resources": [
23 | {
24 | "path": "my-awesome-data.csv",
25 | "description": "This is my awesome data!",
26 | "schema": {
27 | "fields": [
28 | {
29 | "name": "StringField",
30 | "description": "String field description",
31 | "type": "string"
32 | },
33 | {
34 | "name": "NumberField",
35 | "description": "Number field description",
36 | "type": "number"
37 | },
38 | {
39 | "name": "DateTimeField",
40 | "description": "Date time field description",
41 | "type": "datetime"
42 | }
43 | ]
44 | }
45 | },
46 | {
47 | "path": "my-awesome-extra-file.txt",
48 | "description": "This is my awesome extra file!"
49 | }
50 | ],
51 | "keywords": [
52 | "beginner",
53 | "tutorial"
54 | ]
55 | }
56 | ```
57 |
58 | ## Contents
59 | The following metadata is currently supported:
60 | * `kaggle datasets create` (create a new Dataset):
61 | * `title`: Title of the dataset, must be between 6 and 50 characters in length.
62 | * `subtitle`: Subtitle of the dataset, must be between 20 and 80 characters in length.
63 | * `description`: Description of the dataset.
64 | * `id`: The URL slug of your new dataset, a combination of:
65 | 1. Your username or organization slug (if you are a member of an organization).
66 | 2. A unique Dataset slug, must be between 3 and 50 characters in length.
67 | * `licenses`: Must have exactly one entry that specifies the license. Only `name` is evaluated, all other information is ignored. See below for options.
68 | * `resources`: Contains an array of files that are being uploaded. (Note - this is not required, nor if included, does it need to include all of the files to be uploaded.):
69 | * `path`: File path.
70 | * `description`: File description.
71 | * `schema`: File schema (definition below):
72 | * `fields`: Array of fields in the dataset. Please note that this needs to include ALL of the fields in the data in order or they will not be matched up correctly. A later version of the API will fix this bug.
73 | * `name`: Field name
74 | * `title`: Field description
75 | * `type`: Field type. A best-effort list of types will be kept at the bottom of this wiki page, but new types may be added that are not documented here.
76 | * `keywords`: Contains an array of strings that correspond to an existing tag on Kaggle. If a specified tag doesn't exist, the upload will continue, but that specific tag won't be added.
77 | * `kaggle datasets version` (create a new version for an existing Dataset):
78 | * `subtitle`: Subtitle of the dataset, must be between 20 and 80 characters in length.
79 | * `description`: Description of the dataset.
80 | * `id`: The URL slug of the dataset you want to update (see above). You must be the owner or otherwise have edit rights for this dataset. One of `id` or `id_no` must be specified. If both are, `id_no` will be preferred.
81 | * `id_no`: The ID of the dataset. One of `id` or `id_no` must be specified. You must be the owner or otherwise have edit rights for this dataset. If both are, `id_no` will be preferred.
82 | * `resources`: Contains an array of files that are being uploaded. (Note - this is not required, nor if included, does it need to include all of the files to be uploaded.):
83 | * `path`: File path.
84 | * `description`: File description.
85 | * `schema`: File schema (definition below):
86 | * `fields`: Array of fields in the dataset. Please note that this needs to include ALL of the fields in the data in order or they will not be matched up correctly. A later version of the API will fix this bug.
87 | * `name`: Field name
88 | * `title`: Field description
89 | * `type`: Field type. A best-effort list of types will be kept at the bottom of this wiki page, but new types may be added that are not documented here.
90 | * `keywords`: Contains an array of strings that correspond to an existing tag on Kaggle. If a specified tag doesn't exist, the upload will continue, but that specific tag won't be added.
91 |
92 | We will add further metadata processing in upcoming versions of the API.
93 |
94 | ## Licenses
95 | You can specify the following licenses for your datasets:
96 | * `CC0-1.0`: [CC0: Public Domain](https://creativecommons.org/publicdomain/zero/1.0/)
97 | * `CC-BY-SA-3.0`: [CC BY-SA 3.0](https://creativecommons.org/licenses/by-sa/3.0/)
98 | * `CC-BY-SA-4.0`: [CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/)
99 | * `CC-BY-NC-SA-4.0`: [CC BY-NC-SA 4.0](https://creativecommons.org/licenses/by-nc-sa/4.0/)
100 | * `GPL-2.0`: [GPL 2](http://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html)
101 | * `ODbL-1.0`: Database: [Open Database](http://opendatacommons.org/licenses/odbl/1.0/), Contents: © Original Authors
102 | * `DbCL-1.0`: Database: [Open Database](http://opendatacommons.org/licenses/odbl/1.0/), Contents: [Database Contents](http://opendatacommons.org/licenses/dbcl/1.0/)
103 | * `copyright-authors`: Data files © Original Authors
104 | * `other`: Other (specified in description)
105 | * `unknown`: Unknown
106 | * `CC-BY-4.0`:
107 | https://creativecommons.org/licenses/by/4.0/
108 | * `CC-BY-NC-4.0`: https://creativecommons.org/licenses/by-nc/4.0/
109 | * `PDDL`: https://opendatacommons.org/licenses/pddl/1.0/
110 | * `CC-BY-3.0`:
111 | https://creativecommons.org/licenses/by/3.0/
112 | * `CC-BY-3.0-IGO`:
113 | https://creativecommons.org/licenses/by/3.0/igo/
114 | * `US-Government-Works`:
115 | https://www.usa.gov/government-works/
116 | * `CC-BY-NC-SA-3.0-IGO`:
117 | https://creativecommons.org/licenses/by-nc-sa/3.0/igo/
118 | * `CDLA-Permissive-1.0`:
119 | https://cdla.io/permissive-1-0/
120 | * `CDLA-Sharing-1.0`:
121 | https://cdla.io/sharing-1-0/
122 | * `CC-BY-ND-4.0`:
123 | https://creativecommons.org/licenses/by-nd/4.0/
124 | * `CC-BY-NC-ND-4.0`:
125 | https://creativecommons.org/licenses/by-nc-nd/4.0/
126 | * `ODC-BY-1.0`:
127 | https://opendatacommons.org/licenses/by/1-0/index.html
128 | * `LGPL-3.0`:
129 | http://www.gnu.org/licenses/lgpl-3.0.html
130 | * `AGPL-3.0`:
131 | http://www.gnu.org/licenses/agpl-3.0.html
132 | * `FDL-1.3`:
133 | http://www.gnu.org/licenses/fdl-1.3.html
134 | * `EU-ODP-Legal-Notice`: https://ec.europa.eu/info/legal-notice_en
135 | * `apache-2.0`:
136 | https://www.apache.org/licenses/LICENSE-2.0
137 | * `GPL-3.0`: [GPL 3](https://www.gnu.org/licenses/gpl-3.0.html)
138 |
139 | ## Data types
140 | You can specify the following data types
141 | * `string`
142 | * `boolean`
143 | * `numeric`
144 | * `datetime`
145 | * `id`
146 | * `uuid`
147 | * `latitude`
148 | * `longitude`
149 | * `coordinates`
150 | * `country`
151 | * `province` (these are states in the US)
152 | * `postalcode`
153 | * `address`
154 | * `email`
155 | * `url`
156 | * `integer`
157 | * `decimal`
158 | * `city`
159 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. argparse::
2 | :filename: extended/cli.py
3 | :func: create_parser
4 | :prog: kaggle
5 |
6 |
--------------------------------------------------------------------------------
/docs/kernels_metadata.md:
--------------------------------------------------------------------------------
1 | To upload and run a kernel, a special `kernel-metadata.json` file must be specified.
2 |
3 | Here's a basic example for `kernel-metadata.json`:
4 | ```
5 | {
6 | "id": "timoboz/my-awesome-kernel",
7 | "id_no": 12345,
8 | "title": "My Awesome Kernel",
9 | "code_file": "my-awesome-kernel.ipynb",
10 | "language": "python",
11 | "kernel_type": "notebook",
12 | "is_private": "false",
13 | "enable_gpu": "false",
14 | "enable_internet": "false",
15 | "dataset_sources": ["timoboz/my-awesome-dataset"],
16 | "competition_sources": [],
17 | "kernel_sources": [],
18 | "model_sources": []
19 | }
20 | ```
21 | You can also use the API command `kaggle kernels init -p /path/to/kernel` to have the API create this file for you for a new kernel. If you wish to get the metadata for an existing kernel, you can use `kaggle kernels pull -p /path/to/download -k username/kernel-slug -m`.
22 |
23 | ## Contents
24 | We currently support the following metadata fields for kernels.
25 | * `id`: The URL slug of your kernel. One of `id` or `id_no` must be specified. If both are, `id_no` will be preferred.
26 | 1. Your username slug
27 | 2. A unique kernel slug
28 | * `id_no`: The kernel's numeric ID. One of `id` or `id_no` must be specified. If both are, `id_no` will be preferred.
29 | * `title`: The title of the kernel. Required for new kernels - optional for existing ones. Please be aware that kernel titles and slugs are linked to each other. A kernel slug is always the title lowercased with dashes (`-`) replacing spaces.
30 | * If you wish to rename your kernel, you may change the title within the metadata. However, you will need to update the `id` as well AFTER the rename is complete.
31 | * `code_file`: The path to your kernel source code. Required. If not an absolute path, it should be relative to the location of `kernel-metadata.json`.
32 | * `language`: The language your kernel is written in. Valid options are `python`, `r`, and `rmarkdown`. Required.
33 | * `kernel_type`: The type of kernel. Valid options are `script` and `notebook`. Required.
34 | * `is_private`: Whether or not the kernel should be private. If not specified, will be `true`.
35 | * `enable_gpu`: Whether or not the kernel should run on a GPU. If not specified, will be `false`.
36 | * `enable_internet`: Whether or not the kernel should be able to access the internet. If not specified, will be `false`.
37 | * `dataset_sources`: A list of dataset sources, specified as `"username/dataset-slug"`
38 | * `competition_sources`: A list of competition sources, specified as `"competition-slug"`
39 | * `kernel_sources`: A list of kernel sources, specified as `"username/kernel-slug"`
40 | * `model_sources`: A list of model sources, specified as `"username/model-slug/framework/variation-slug/version-number"`
41 |
42 | We will add further metadata processing in upcoming versions of the API.
--------------------------------------------------------------------------------
/docs/model_card.md:
--------------------------------------------------------------------------------
1 | # Model Summary
2 |
3 | Provide a brief overview of the model including details about its architecture, how it can be used, characteristics of the model, training data, and evaluation results.
4 |
5 | ## Usage
6 |
7 | How can this model be used? You should provide a code snippet that demonstrates how to load and/or fine-tune your model, and you should define the shape of both the inputs and the outputs. Are there known and preventable failures to be aware of?
8 |
9 | ## System
10 |
11 | Is this a standalone model or part of a system? What are the input requirements? What are the downstream dependencies when using the model outputs?
12 |
13 | ## Implementation requirements
14 |
15 | What hardware and software were used for training the model? Describe the compute requirements for training and inference (e.g., # of chips, training time, total computation, measured performance, energy consumption).
16 |
17 | # Model Characteristics
18 |
19 | ## Model initialization
20 |
21 | Was the model trained from scratch or fine-tuned from a pre-trained model?
22 |
23 | ## Model stats
24 |
25 | What’s the size of the model? Provide information about size, weights, layers, and latency.
26 |
27 | ## Other details
28 |
29 | Is the model pruned? Is it quantized? Describe any techniques to preserve differential privacy.
30 |
31 | # Data Overview
32 |
33 | Provide more details about the data used to train this model.
34 |
35 | ## Training data
36 |
37 | Describe the data that was used to train the model. How was it collected? What pre-processing was done?
38 |
39 | ## Demographic groups
40 |
41 | Describe any demographic data or attributes that suggest demographic groups
42 |
43 | ## Evaluation data
44 |
45 | What was the train / test / dev split? Are there notable differences between training and test data?
46 |
47 | # Evaluation Results
48 |
49 | ## Summary
50 |
51 | Summarize and link to evaluation results for this analysis.
52 |
53 | ## Subgroup evaluation results
54 |
55 | Did you do any subgroup analysis? Describe the results and any assumptions about disaggregating data. Are there any known and preventable failures about this model?
56 |
57 | ## Fairness
58 |
59 | How did you define fairness? What metrics and baselines did you use? What were the results of your analysis?
60 |
61 | ## Usage limitations
62 |
63 | Are there sensitive use cases? What factors might limit model performance and what conditions should be satisfied to use this model?
64 |
65 | ## Ethics
66 |
67 | What ethical factors did the model developers consider? Were any risks identified? What mitigations or remediations were undertaken?
68 |
--------------------------------------------------------------------------------
/docs/modelinstance_usage.md:
--------------------------------------------------------------------------------
1 | # Model Format
2 |
3 | Describe the format for the model (e.g. a SavedModel file for TF 2.0)
4 |
5 | # Training Data
6 |
7 | Describe the data that the model instance was trained on.
8 |
9 | # Model Inputs
10 |
11 | Describe the type and the shape of the model inputs.
12 |
13 | # Model Outputs
14 |
15 | Describe the type and the shape of the model outputs.
16 |
17 | # Model Usage
18 |
19 | Provide code snippets that demonstrate how to load and make use of the model instance.
20 |
21 | # Fine-tuning
22 |
23 | Provide code snippets that demonstrate how to fine-tune the model instance (if applicable).
24 |
25 | # Changelog
26 |
27 | Describe the differences between the different versions for this specific model instance (if applicable).
28 |
--------------------------------------------------------------------------------
/docs/models_metadata.md:
--------------------------------------------------------------------------------
1 | A full model is composed of 3 types of entities:
2 |
3 | 1. The model
4 | 2. The instances
5 | 3. The instance versions
6 |
7 | Let's take the example of [efficientnet](https://www.kaggle.com/models/tensorflow/efficientnet) to explain these entities.
8 |
9 | A model like `efficientnet` contains multiple instances.
10 |
11 | An instance is a specific variation of the model (e.g. B0, B1, ...) with a certain framework (e.g. TensorFlow2).
12 |
13 | ## Model
14 |
15 | To create a model, a special `model-metadata.json` file must be specified.
16 |
17 | Here's a basic example for `model-metadata.json`:
18 | ```
19 | {
20 | "ownerSlug": "INSERT_OWNER_SLUG_HERE",
21 | "title": "INSERT_TITLE_HERE",
22 | "slug": "INSERT_SLUG_HERE",
23 | "subtitle": "",
24 | "isPrivate": true,
25 | "description": "Model Card Markdown, see below",
26 | "publishTime": "",
27 | "provenanceSources": ""
28 | }
29 | ```
30 |
31 | You can also use the API command `kaggle models init -p /path/to/model` to have the API create this file for you for a new model. If you wish to get the metadata for an existing model, you can use `kaggle models get username/model-slug`.
32 |
33 | ### Contents
34 |
35 | We currently support the following metadata fields for models.
36 |
37 | * `ownerSlug`: the slug of the user or organization
38 | * `title`: the model's title
39 | * `slug`: the model's slug (unique per owner)
40 | * `licenseName`: the name of the license (see the list below)
41 | * `subtitle`: the model's subtitle
42 | * `isPrivate`: whether or not the model should be private (only visible by the owners). If not specified, will be `true`
43 | * `description`: the model's card in markdown syntax (see the template below)
44 | * `publishTime`: the original publishing time of the model
45 | * `provenanceSources`: the provenance of the model
46 |
47 | ### Description
48 |
49 | You can find a template of the model card on this wiki page: https://github.com/Kaggle/kaggle-api/wiki/Model-Card
50 |
51 | ## Model Instance
52 |
53 | To create a model instance, a special `model-instance-metadata.json` file must be specified.
54 |
55 | Here's a basic example for `model-instance-metadata.json`:
56 | ```
57 | {
58 | "ownerSlug": "INSERT_OWNER_SLUG_HERE",
59 | "modelSlug": "INSERT_EXISTING_MODEL_SLUG_HERE",
60 | "instanceSlug": "INSERT_INSTANCE_SLUG_HERE",
61 | "framework": "INSERT_FRAMEWORK_HERE",
62 | "overview": "",
63 | "usage": "Usage Markdown, see below",
64 | "licenseName": "Apache 2.0",
65 |   "fineTunable": false,
66 | "trainingData": [],
67 | "modelInstanceType": "Unspecified",
68 | "baseModelInstance": "",
69 | "externalBaseModelUrl": ""
70 | }
71 | ```
72 |
73 | You can also use the API command `kaggle models instances init -p /path/to/model-instance` to have the API create this file for you for a new model instance.
74 |
75 | ### Contents
76 |
77 | We currently support the following metadata fields for model instances.
78 |
79 | * `ownerSlug`: the slug of the user or organization of the model
80 | * `modelSlug`: the existing model's slug
81 | * `instanceSlug`: the slug of the instance
82 | * `framework`: the instance's framework (possible options: `tensorFlow1`,`tensorFlow2`,`tfLite`,`tfJs`,`pyTorch`,`jax`,`coral`, ...)
83 | * `overview`: a short overview of the instance
84 | * `usage`: the instance's usage in markdown syntax (see the template below)
85 | * `fineTunable`: whether the instance is fine tunable
86 | * `trainingData`: a list of training data in the form of strings, URLs, Kaggle Datasets, etc...
87 | * `modelInstanceType`: whether the model instance is a base model, external variant, internal variant, or unspecified
88 | * `baseModelInstance`: if this is an internal variant, the `{owner-slug}/{model-slug}/{framework}/{instance-slug}` of the base model instance
89 | * `externalBaseModelUrl`: if this is an external variant, a URL to the base model
90 |
91 | ### Licenses
92 |
93 | Here is a list of the available licenses for models:
94 |
95 | - Apache 2.0
96 | - Attribution 3.0 IGO (CC BY 3.0 IGO)
97 | - Attribution 3.0 Unported (CC BY 3.0)
98 | - Attribution 4.0 International (CC BY 4.0)
99 | - Attribution-NoDerivatives 4.0 International (CC BY-ND 4.0)
100 | - Attribution-NonCommercial 4.0 International (CC BY-NC 4.0)
101 | - Attribution-NonCommercial-NoDerivatives 4.0 International (CC BY-NC-ND 4.0)
102 | - Attribution-NonCommercial-ShareAlike 3.0 IGO (CC BY-NC-SA 3.0 IGO)
103 | - BSD-3-Clause
104 | - CC BY-NC-SA 4.0
105 | - CC BY-SA 3.0
106 | - CC BY-SA 4.0
107 | - CC0: Public Domain
108 | - Community Data License Agreement - Permissive - Version 1.0
109 | - Community Data License Agreement - Sharing - Version 1.0
110 | - GNU Affero General Public License 3.0
111 | - GNU Free Documentation License 1.3
112 | - GNU Lesser General Public License 3.0
113 | - GPL 2
114 | - MIT
115 | - ODC Attribution License (ODC-By)
116 | - ODC Public Domain Dedication and Licence (PDDL)
117 | - GPL 3
118 |
119 | ### Usage
120 |
121 | You can find a template of the Usage markdown on this wiki page: https://github.com/Kaggle/kaggle-api/wiki/ModelInstance-Usage
122 |
123 | The following template variables can be used in this markdown:
124 |
125 | - `${VERSION_NUMBER}` is replaced by the version number when rendered
126 | - `${VARIATION_SLUG}` is replaced by the variation slug when rendered
127 | - `${FRAMEWORK}` is replaced by the framework name
128 | - `${PATH}` is replaced by `/kaggle/input/<owner-slug>/<model-slug>/<framework>/<variation-slug>`.
129 | - `${FILEPATH}` is replaced by `/kaggle/input/<owner-slug>/<model-slug>/<framework>/<variation-slug>/<file-name>`. This value is only defined if the databundle contains a single file
130 | - `${URL}` is replaced by the absolute URL of the model
--------------------------------------------------------------------------------
/documentation/competitions.md:
--------------------------------------------------------------------------------
1 | # Competitions Commands
2 |
3 | Commands for interacting with Kaggle competitions.
4 |
5 | ## `kaggle competitions list`
6 |
7 | Lists available competitions.
8 |
9 | **Usage:**
10 |
11 | ```bash
12 | kaggle competitions list [options]
13 | ```
14 |
15 | **Options:**
16 |
17 | * `--group <group>`: Filter by competition group. Valid options: `general`, `entered`, `inClass`.
18 | * `--category <category>`: Filter by competition category. Valid options: `all`, `featured`, `research`, `recruitment`, `gettingStarted`, `masters`, `playground`.
19 | * `--sort-by <sortBy>`: Sort results. Valid options: `grouped`, `prize`, `earliestDeadline`, `latestDeadline`, `numberOfTeams`, `recentlyCreated` (default: `latestDeadline`).
20 | * `-p, --page <page>`: Page number for results (default: 1).
21 | * `-s, --search <search>`: Search term.
22 | * `-v, --csv`: Print results in CSV format.
23 |
24 | **Example:**
25 |
26 | List featured competitions in the general group, sorted by prize:
27 |
28 | ```bash
29 | kaggle competitions list --group general --category featured --sort-by prize
30 | ```
31 |
32 | **Purpose:**
33 |
34 | This command helps you discover new competitions or find specific ones based on various criteria.
35 |
36 | ## `kaggle competitions files`
37 |
38 | Lists files for a specific competition.
39 |
40 | **Usage:**
41 |
42 | ```bash
43 | kaggle competitions files <competition> [options]
44 | ```
45 |
46 | **Arguments:**
47 |
48 | * `<competition>`: Competition URL suffix (e.g., `titanic`).
49 |
50 | **Options:**
51 |
52 | * `-v, --csv`: Print results in CSV format.
53 | * `-q, --quiet`: Suppress verbose output.
54 | * `--page-token <token>`: Page token for results paging.
55 | * `--page-size <size>`: Number of items to show on a page (default: 20, max: 200).
56 |
57 | **Example:**
58 |
59 | List the first 3 files for the "titanic" competition in CSV format, quietly:
60 |
61 | ```bash
62 | kaggle competitions files titanic --page-size=3 -v -q
63 | ```
64 |
65 | **Purpose:**
66 |
67 | Use this command to see the data files available for a competition before downloading them.
68 |
69 | ## `kaggle competitions download`
70 |
71 | Downloads competition files.
72 |
73 | **Usage:**
74 |
75 | ```bash
76 | kaggle competitions download <competition> [options]
77 | ```
78 |
79 | **Arguments:**
80 |
81 | * `<competition>`: Competition URL suffix (e.g., `titanic`).
82 |
83 | **Options:**
84 |
85 | * `-f, --file <file>`: Specific file to download (downloads all if not specified).
86 | * `-p, --path <path>`: Folder to download files to (defaults to current directory).
87 | * `-w, --wp`: Download files to the current working path (equivalent to `-p .`).
88 | * `-o, --force`: Force download, overwriting existing files.
89 | * `-q, --quiet`: Suppress verbose output.
90 |
91 | **Examples:**
92 |
93 | 1. Download all files for the "titanic" competition to the current directory, overwriting existing files, quietly:
94 |
95 | ```bash
96 | kaggle competitions download titanic -w -o -q
97 | ```
98 |
99 | 2. Download the `test.csv` file from the "titanic" competition to a folder named `tost`:
100 |
101 | ```bash
102 | kaggle competitions download titanic -f test.csv -p tost
103 | ```
104 |
105 | **Purpose:**
106 |
107 | This command allows you to get the necessary data files for a competition onto your local machine.
108 |
109 | ## `kaggle competitions submit`
110 |
111 | Makes a new submission to a competition.
112 |
113 | **Usage:**
114 |
115 | ```bash
116 | kaggle competitions submit <competition> -f <file> -m <message> [options]
117 | ```
118 |
119 | **Arguments:**
120 |
121 | * `<competition>`: Competition URL suffix (e.g., `house-prices-advanced-regression-techniques`).
122 | * `-f, --file <file>`: The submission file.
123 | * `-m, --message <message>`: The submission message.
124 |
125 | **Options:**
126 |
127 | * `-k, --kernel <kernel>`: Name of the kernel (notebook) to submit (for code competitions).
128 | * `-v, --version <version>`: Version of the kernel to submit.
129 | * `-q, --quiet`: Suppress verbose output.
130 |
131 | **Example:**
132 |
133 | Submit `sample_submission.csv` to the "house-prices-advanced-regression-techniques" competition with the message "Test message":
134 |
135 | ```bash
136 | kaggle competitions submit house-prices-advanced-regression-techniques -f sample_submission.csv -m "Test message"
137 | ```
138 |
139 | **Purpose:**
140 |
141 | Use this command to upload your predictions or code to a competition for scoring.
142 |
143 | ## `kaggle competitions submissions`
144 |
145 | Shows your past submissions for a competition.
146 |
147 | **Usage:**
148 |
149 | ```bash
150 | kaggle competitions submissions <competition> [options]
151 | ```
152 |
153 | **Arguments:**
154 |
155 | * `<competition>`: Competition URL suffix (e.g., `house-prices-advanced-regression-techniques`).
156 |
157 | **Options:**
158 |
159 | * `-v, --csv`: Print results in CSV format.
160 | * `-q, --quiet`: Suppress verbose output.
161 |
162 | **Example:**
163 |
164 | Show submissions for "house-prices-advanced-regression-techniques" in CSV format, quietly:
165 |
166 | ```bash
167 | kaggle competitions submissions house-prices-advanced-regression-techniques -v -q
168 | ```
169 |
170 | **Purpose:**
171 |
172 | This command allows you to review your previous submission attempts and their scores.
173 |
174 | ## `kaggle competitions leaderboard`
175 |
176 | Gets competition leaderboard information.
177 |
178 | **Usage:**
179 |
180 | ```bash
181 | kaggle competitions leaderboard <competition> [options]
182 | ```
183 |
184 | **Arguments:**
185 |
186 | * `<competition>`: Competition URL suffix (e.g., `titanic`).
187 |
188 | **Options:**
189 |
190 | * `-s, --show`: Show the top of the leaderboard in the console.
191 | * `-d, --download`: Download the entire leaderboard to a CSV file.
192 | * `-p, --path <path>`: Folder to download the leaderboard to (if `-d` is used).
193 | * `-v, --csv`: Print results in CSV format (used with `-s`).
194 | * `-q, --quiet`: Suppress verbose output.
195 |
196 | **Examples:**
197 |
198 | 1. Download the "titanic" leaderboard to a folder named `leaders`, quietly:
199 |
200 | ```bash
201 | kaggle competitions leaderboard titanic -d -p leaders -q
202 | ```
203 |
204 | 2. Download the leaderboard and save it to `leaderboard.txt`:
205 |
206 | ```bash
207 | kaggle competitions leaderboard titanic > leaderboard.txt
208 | ```
209 |
210 | **Purpose:**
211 |
212 | This command lets you view your ranking and the scores of other participants in a competition.
213 |
--------------------------------------------------------------------------------
/documentation/configuration.md:
--------------------------------------------------------------------------------
1 | # Kaggle CLI Configuration
2 |
3 | The Kaggle CLI uses a configuration file to store settings such as your API credentials and default values for commands.
4 |
5 | ## Configuration Commands
6 |
7 | ### `config view`
8 |
9 | Displays the current configuration values.
10 |
11 | **Usage:**
12 |
13 | ```bash
14 | kaggle config view
15 | ```
16 |
17 | **Purpose:**
18 |
19 | This command allows you to inspect the current settings of your Kaggle CLI, such as the configured API endpoint, proxy settings, and default competition.
20 |
21 | ### `config set`
22 |
23 | Sets a specific configuration value.
24 |
25 | **Usage:**
26 |
27 | ```bash
28 | kaggle config set -n <name> -v <value>
29 | ```
30 |
31 | **Arguments:**
32 |
33 | * `-n, --name <name>`: The name of the configuration parameter to set. Valid options are `competition`, `path`, and `proxy`.
34 | * `-v, --value <value>`: The value to set for the configuration parameter.
35 | * For `competition`: The competition URL suffix (e.g., `titanic`).
36 | * For `path`: The default folder where files will be downloaded.
37 | * For `proxy`: The proxy server URL.
38 |
39 | **Example:**
40 |
41 | Set the default competition to "titanic":
42 |
43 | ```bash
44 | kaggle config set -n competition -v titanic
45 | ```
46 |
47 | **Purpose:**
48 |
49 | Use this command to customize the behavior of the Kaggle CLI, such as setting a default competition to avoid specifying it in every command, defining a default download path, or configuring a proxy server.
50 |
51 | ### `config unset`
52 |
53 | Clears a specific configuration value, reverting it to its default.
54 |
55 | **Usage:**
56 |
57 | ```bash
58 | kaggle config unset -n <name>
59 | ```
60 |
61 | **Arguments:**
62 |
63 | * `-n, --name <name>`: The name of the configuration parameter to clear. Valid options are `competition`, `path`, and `proxy`.
64 |
65 | **Example:**
66 |
67 | Clear the default competition:
68 |
69 | ```bash
70 | kaggle config unset -n competition
71 | ```
72 |
73 | **Purpose:**
74 |
75 | This command removes a previously set configuration value, allowing the CLI to use its default behavior or prompt for the value if required.
76 |
77 | ## Configuration File Location
78 |
79 | The Kaggle CLI configuration is typically stored in a file named `kaggle.json` located in the `~/.kaggle/` directory on Linux and macOS, or `C:\Users\<Windows-username>\.kaggle\` on Windows.
80 |
81 | This file contains your API username and key:
82 |
83 | ```json
84 | {"username":"YOUR_USERNAME","key":"YOUR_API_KEY"}
85 | ```
86 |
87 | You can download this file from your Kaggle account page (`https://www.kaggle.com/<username>/account`) and place it in the correct directory.
88 |
89 | Alternatively, you can set the `KAGGLE_USERNAME` and `KAGGLE_KEY` environment variables.
90 |
--------------------------------------------------------------------------------
/documentation/index.md:
--------------------------------------------------------------------------------
1 | # Kaggle CLI Documentation
2 |
3 | Welcome to the Kaggle CLI documentation. This guide provides detailed information on how to use the Kaggle command-line interface to interact with Kaggle's platform.
4 |
5 | ## Getting Started
6 |
7 | Before you begin, ensure you have the Kaggle CLI installed and configured with your API credentials. You can find your API token on your Kaggle account page.
8 |
9 | ## Command Groups
10 |
11 | The Kaggle CLI is organized into several command groups:
12 |
13 | * [Competitions](./competitions.md): Manage and participate in Kaggle competitions.
14 | * [Datasets](./datasets.md): Search, download, and manage Kaggle datasets.
15 | * [Kernels](./kernels.md): Interact with Kaggle Kernels (notebooks and scripts).
16 | * [Models](./models.md): Manage your Kaggle Models.
17 | * [Model Instances](./model_instances.md): Manage instances of your Kaggle Models.
18 | * [Model Instance Versions](./model_instance_versions.md): Manage versions of your Kaggle Model Instances.
19 | * [Configuration](./configuration.md): Configure the Kaggle CLI.
20 |
21 | ## Tutorials
22 |
23 | Explore these tutorials to learn how to perform common tasks:
24 |
25 | * [Tutorials](./tutorials.md)
26 | * [How to Submit to a Competition](./tutorials.md#tutorial-how-to-submit-to-a-competition)
27 |
28 |
--------------------------------------------------------------------------------
/documentation/model_instance_versions.md:
--------------------------------------------------------------------------------
1 | # Model Instance Versions Commands
2 |
3 | Commands for managing versions of a specific Kaggle Model Instance. Each version represents a snapshot of the model instance files at a point in time.
4 |
5 | ## `kaggle models instances versions create`
6 |
7 | Creates a new version of an existing model instance.
8 |
9 | **Usage:**
10 |
11 | ```bash
12 | kaggle models instances versions create <model-instance> -p <folder> [options]
13 | ```
14 |
15 | **Arguments:**
16 |
17 | * `<model-instance>`: The target model instance URL suffix for the new version (format: `owner/model-slug/framework/instance-slug`, e.g., `$KAGGLE_DEVELOPER/test-model/jax/main`).
18 |
19 | **Options:**
20 |
21 | * `-p, --path <folder>`: Path to the folder containing the files for this new version (defaults to the current directory).
22 | * `-n, --version-notes <notes>`: Notes describing this version.
23 | * `-q, --quiet`: Suppress verbose output.
24 | * `-r, --dir-mode <mode>`: How to handle directories within the upload: `skip` (ignore), `zip` (compressed upload), `tar` (uncompressed upload) (default: `skip`).
25 |
26 | **Example:**
27 |
28 | Create a new version for the model instance `$KAGGLE_DEVELOPER/test-model/jax/main` using files from the `tmp` folder, with version notes "Updated model files", quietly, and skipping subdirectories:
29 |
30 | ```bash
31 | # Ensure tmp folder contains the new files for the version, e.g., data_v2.csv
32 | # echo "e,f,g,h" > tmp/data_v2.csv
33 |
34 | kaggle models instances versions create $KAGGLE_DEVELOPER/test-model/jax/main -p tmp -n "Updated model files" -q -r skip
35 | ```
36 |
37 | **Purpose:**
38 |
39 | This command uploads a new set of files to an existing model instance, creating a new, numbered version. This allows you to track changes and revert to previous versions of your model instance files.
40 |
41 | ## `kaggle models instances versions download`
42 |
43 | Downloads files for a specific version of a model instance.
44 |
45 | **Usage:**
46 |
47 | ```bash
48 | kaggle models instances versions download <model-instance-version> [options]
49 | ```
50 |
51 | **Arguments:**
52 |
53 | * `<model-instance-version>`: Model instance version URL suffix in the format `owner/model-slug/framework/instance-slug/version-number` (e.g., `$KAGGLE_DEVELOPER/test-model/jax/main/1`).
54 |
55 | **Options:**
56 |
57 | * `-p, --path <folder>`: Folder to download files to (defaults to current directory).
58 | * `--untar`: Untar the downloaded file if it's a `.tar` archive (deletes the `.tar` file afterwards).
59 | * `--unzip`: Unzip the downloaded file if it's a `.zip` archive (deletes the `.zip` file afterwards).
60 | * `-f, --force`: Force download, overwriting existing files.
61 | * `-q, --quiet`: Suppress verbose output.
62 |
63 | **Example:**
64 |
65 | Download version 1 of the model instance `$KAGGLE_DEVELOPER/test-model/jax/main` into the `tmp` folder, untar if applicable, force overwrite, and do it quietly:
66 |
67 | ```bash
68 | kaggle models instances versions download $KAGGLE_DEVELOPER/test-model/jax/main/1 -p tmp -q -f --untar
69 | ```
70 |
71 | **Purpose:**
72 |
73 | This command allows you to retrieve the specific files associated with a particular version of a model instance.
74 |
75 | ## `kaggle models instances versions files`
76 |
77 | Lists files for a specific version of a model instance.
78 |
79 | **Usage:**
80 |
81 | ```bash
82 | kaggle models instances versions files <model-instance-version> [options]
83 | ```
84 |
85 | **Arguments:**
86 |
87 | * `<model-instance-version>`: Model instance version URL suffix (e.g., `google/gemma/pytorch/7b/2`).
88 |
89 | **Options:**
90 |
91 | * `-v, --csv`: Print results in CSV format.
92 | * `--page-size <size>`: Number of items per page (default: 20).
93 | * `--page-token <token>`: Page token for results paging.
94 |
95 | **Example:**
96 |
97 | List the first 3 files for version 2 of the model instance `google/gemma/pytorch/7b` in CSV format:
98 |
99 | ```bash
100 | kaggle models instances versions files google/gemma/pytorch/7b/2 -v --page-size=3
101 | ```
102 |
103 | **Purpose:**
104 |
105 | Use this command to see the individual files that constitute a specific version of a model instance before downloading.
106 |
107 | ## `kaggle models instances versions delete`
108 |
109 | Deletes a specific version of a model instance from Kaggle.
110 |
111 | **Usage:**
112 |
113 | ```bash
114 | kaggle models instances versions delete <model-instance-version> [options]
115 | ```
116 |
117 | **Arguments:**
118 |
119 | * `<model-instance-version>`: Model instance version URL suffix in the format `owner/model-slug/framework/instance-slug/version-number` (e.g., `$KAGGLE_DEVELOPER/test-model/jax/main/1`).
120 |
121 | **Options:**
122 |
123 | * `-y, --yes`: Automatically confirm deletion without prompting.
124 |
125 | **Example:**
126 |
127 | Delete version 1 of the model instance `$KAGGLE_DEVELOPER/test-model/jax/main` and automatically confirm:
128 |
129 | ```bash
130 | kaggle models instances versions delete $KAGGLE_DEVELOPER/test-model/jax/main/1 -y
131 | ```
132 |
133 | **Purpose:**
134 |
135 | This command permanently removes a specific version of your model instance from Kaggle. Use with caution. If it's the only version, this may lead to the deletion of the model instance itself if no other versions exist.
136 |
--------------------------------------------------------------------------------
/documentation/model_instances.md:
--------------------------------------------------------------------------------
1 | # Model Instances Commands
2 |
3 | Commands for interacting with instances of Kaggle Models. A model instance typically represents a specific framework of a parent model.
4 |
5 | ## `kaggle models instances init`
6 |
7 | Initializes a metadata file (`model-instance-metadata.json`) for creating a new model instance.
8 |
9 | **Usage:**
10 |
11 | ```bash
12 | kaggle models instances init -p <folder>
13 | ```
14 |
15 | **Options:**
16 |
17 | * `-p, --path <folder>`: The path to the folder where the `model-instance-metadata.json` file will be created (defaults to the current directory).
18 |
19 | **Example:**
20 |
21 | Initialize a model instance metadata file in the `tmp` folder:
22 |
23 | ```bash
24 | kaggle models instances init -p tmp
25 | ```
26 |
27 | **Purpose:**
28 |
29 | This command creates a template `model-instance-metadata.json` file. You must edit this file with details such as the owner slug, the parent model slug, the instance slug (URL-friendly name for this instance), and the framework (e.g., `tensorflow`, `pytorch`, `jax`, `sklearn`) before creating the instance.
30 |
31 | ## `kaggle models instances create`
32 |
33 | Creates a new model instance under an existing model on Kaggle.
34 |
35 | **Usage:**
36 |
37 | ```bash
38 | kaggle models instances create -p <folder> [options]
39 | ```
40 |
41 | **Options:**
42 |
43 | * `-p, --path <folder>`: Path to the folder containing the model instance files and the `model-instance-metadata.json` file (defaults to the current directory).
44 | * `-q, --quiet`: Suppress verbose output.
45 | * `-r, --dir-mode <mode>`: How to handle directories within the upload: `skip` (ignore), `zip` (compressed upload), `tar` (uncompressed upload) (default: `skip`).
46 |
47 | **Example:**
48 |
49 | Create a new model instance using the metadata and files in the `tmp` folder, quietly, skipping subdirectories. (Assumes `model-instance-metadata.json` in `tmp` has been properly edited):
50 |
51 | ```bash
52 | # Example: Edit model-instance-metadata.json first
53 | # sed -i 's/INSERT_OWNER_SLUG_HERE/your-username/' tmp/model-instance-metadata.json
54 | # sed -i 's/INSERT_EXISTING_MODEL_SLUG_HERE/parent-model-slug/' tmp/model-instance-metadata.json
55 | # sed -i 's/INSERT_INSTANCE_SLUG_HERE/my-instance-slug/' tmp/model-instance-metadata.json
56 | # sed -i 's/INSERT_FRAMEWORK_HERE/jax/' tmp/model-instance-metadata.json
57 | # echo "a,b,c,d" > tmp/data.csv # Example model file
58 |
59 | kaggle models instances create -p tmp -q -r skip
60 | ```
61 |
62 | **Purpose:**
63 |
64 | This command uploads your local model files (e.g., weights, architecture definition) and the associated instance metadata to create a new instance under a specified parent model on Kaggle. This effectively creates the first version of this model instance.
65 |
66 | ## `kaggle models instances get`
67 |
68 | Downloads the `model-instance-metadata.json` file for an existing model instance.
69 |
70 | **Usage:**
71 |
72 | ```bash
73 | kaggle models instances get <model-instance> -p <folder>
74 | ```
75 |
76 | **Arguments:**
77 |
78 | * `<model-instance>`: Model instance URL suffix in the format `owner/model-slug/framework/instance-slug` (e.g., `$KAGGLE_DEVELOPER/test-model/jax/main`).
79 |
80 | **Options:**
81 |
82 | * `-p, --path <folder>`: Folder to download the `model-instance-metadata.json` file to.
83 |
84 | **Example:**
85 |
86 | Download the metadata for model instance `$KAGGLE_DEVELOPER/test-model/jax/main` into the `tmp` folder:
87 |
88 | ```bash
89 | kaggle models instances get $KAGGLE_DEVELOPER/test-model/jax/main -p tmp
90 | ```
91 |
92 | **Purpose:**
93 |
94 | This command retrieves the metadata file for an existing model instance. This can be useful for inspection or as a basis for an update.
95 |
96 | ## `kaggle models instances files`
97 |
98 | Lists files for the current version of a model instance.
99 |
100 | **Usage:**
101 |
102 | ```bash
103 | kaggle models instances files <model-instance> [options]
104 | ```
105 |
106 | **Arguments:**
107 |
108 | * `<model-instance>`: Model instance URL suffix (e.g., `$KAGGLE_DEVELOPER/test-model/jax/main`).
109 |
110 | **Options:**
111 |
112 | * `-v, --csv`: Print results in CSV format.
113 | * `--page-size <size>`: Number of items per page (default: 20).
114 | * `--page-token <token>`: Page token for results paging.
115 |
116 | **Example:**
117 |
118 | List the first 5 files for the model instance `$KAGGLE_DEVELOPER/test-model/jax/main` in CSV format:
119 |
120 | ```bash
121 | kaggle models instances files $KAGGLE_DEVELOPER/test-model/jax/main -v --page-size 5
122 | ```
123 |
124 | **Purpose:**
125 |
126 | Use this command to see the files associated with the latest version of a specific model instance.
127 |
128 | ## `kaggle models instances update`
129 |
130 | Updates an existing model instance on Kaggle using a local `model-instance-metadata.json` file.
131 |
132 | **Usage:**
133 |
134 | ```bash
135 | kaggle models instances update -p <folder>
136 | ```
137 |
138 | **Options:**
139 |
140 | * `-p, --path <folder>`: Path to the folder containing the `model-instance-metadata.json` file with the updated information (defaults to the current directory). Note: This command only updates the metadata of the instance, not the files. To update files, create a new version.
141 |
142 | **Example:**
143 |
144 | Update the model instance whose details are in `tmp/model-instance-metadata.json` (ensure the slugs and owner in the JSON match an existing model instance):
145 |
146 | ```bash
147 | kaggle models instances update -p tmp
148 | ```
149 |
150 | **Purpose:**
151 |
152 | Use this command to change the metadata of an existing model instance, such as its description or other fields defined in the `model-instance-metadata.json` file. This does not upload new files or create a new version.
153 |
154 | ## `kaggle models instances delete`
155 |
156 | Deletes a model instance from Kaggle.
157 |
158 | **Usage:**
159 |
161 | ```bash
162 | kaggle models instances delete <model-instance> [options]
163 | ```
164 |
165 | **Arguments:**
166 |
167 | * `<model-instance>`: Model instance URL suffix in the format `owner/model-slug/framework/instance-slug` (e.g., `$KAGGLE_DEVELOPER/test-model/jax/main`).
167 |
168 | **Options:**
169 |
170 | * `-y, --yes`: Automatically confirm deletion without prompting.
171 |
172 | **Example:**
173 |
174 | Delete the model instance `$KAGGLE_DEVELOPER/test-model/jax/main` and automatically confirm:
175 |
176 | ```bash
177 | kaggle models instances delete $KAGGLE_DEVELOPER/test-model/jax/main -y
178 | ```
179 |
180 | **Purpose:**
181 |
182 | This command permanently removes one of your model instances (and all its versions) from Kaggle. Use with caution.
183 |
--------------------------------------------------------------------------------
/documentation/models.md:
--------------------------------------------------------------------------------
1 | # Models Commands
2 |
3 | Commands for interacting with Kaggle Models.
4 |
5 | ## `kaggle models list`
6 |
7 | Lists available models.
8 |
9 | **Usage:**
10 |
11 | ```bash
12 | kaggle models list [options]
13 | ```
14 |
15 | **Options:**
16 |
17 | * `--owner <owner>`: Filter by a specific user or organization.
18 | * `--sort-by <sort>`: Sort results. Valid options: `hotness`, `downloadCount`, `voteCount`, `notebookCount`, `createTime` (default: `hotness`).
19 | * `-s, --search <term>`: Search term.
20 | * `--page-size <size>`: Number of items per page (default: 20).
21 | * `--page-token <token>`: Page token for results paging.
22 | * `-v, --csv`: Print results in CSV format.
23 |
24 | **Examples:**
25 |
26 | 1. List models owned by `$KAGGLE_DEVELOPER` (replace with your username), sorted by creation time, in CSV format:
27 |
28 | ```bash
29 | kaggle models list --owner $KAGGLE_DEVELOPER --sort-by createTime -v
30 | ```
31 |
32 | 2. List the first 5 models matching the search term "gemini":
33 |
34 | ```bash
35 | kaggle models list -s gemini --page-size 5
36 | ```
37 |
38 | **Purpose:**
39 |
40 | This command helps you find models on Kaggle, filtering by owner or searching by keywords, and sorting by various criteria.
41 |
42 | ## `kaggle models init`
43 |
44 | Initializes a metadata file (`model-metadata.json`) for creating a new model.
45 |
46 | **Usage:**
47 |
48 | ```bash
49 | kaggle models init -p <folder>
50 | ```
51 |
52 | **Options:**
53 |
54 | * `-p, --path <folder>`: The path to the folder where the `model-metadata.json` file will be created (defaults to the current directory).
55 |
56 | **Example:**
57 |
58 | Initialize a model metadata file in a new temporary folder `tmp`:
59 |
60 | ```bash
61 | mkdir tmp
62 | kaggle models init -p tmp
63 | ```
64 |
65 | **Purpose:**
66 |
67 | This command creates a template `model-metadata.json` file. You must edit this file with your model's details, such as owner slug, title, model slug (URL-friendly version of the title), and a description, before creating the model on Kaggle.
68 |
69 | ## `kaggle models create`
70 |
71 | Creates a new model on Kaggle.
72 |
73 | **Usage:**
74 |
75 | ```bash
76 | kaggle models create -p <folder>
77 | ```
78 |
79 | **Options:**
80 |
81 | * `-p, --path <folder>`: Path to the folder containing the `model-metadata.json` file (defaults to the current directory). This folder should also contain your model files that you intend to upload as part of the first model instance.
82 |
83 | **Example:**
84 |
85 | Create a new model using the metadata in `tmp/model-metadata.json`. (Assumes the metadata file has been edited with owner, title, and slug):
86 |
87 | ```bash
88 | # Example: Edit model-metadata.json first
89 | # sed -i 's/INSERT_OWNER_SLUG_HERE/your-username/' tmp/model-metadata.json
90 | # sed -i 's/INSERT_TITLE_HERE/My Awesome Model/' tmp/model-metadata.json
91 | # sed -i 's/INSERT_SLUG_HERE/my-awesome-model/' tmp/model-metadata.json
92 |
93 | kaggle models create -p tmp
94 | ```
95 |
96 | **Purpose:**
97 |
98 | This command registers a new model on Kaggle using the provided metadata. After this, you will typically create model instances and versions.
99 |
100 | ## `kaggle models get`
101 |
102 | Downloads the `model-metadata.json` file for an existing model.
103 |
104 | **Usage:**
105 |
106 | ```bash
107 | kaggle models get <model> -p <folder>
108 | ```
109 |
110 | **Arguments:**
111 |
112 | * `<model>`: Model URL suffix in the format `owner/model-slug` (e.g., `$KAGGLE_DEVELOPER/test-model`).
113 |
114 | **Options:**
115 |
116 | * `-p, --path <folder>`: Folder to download the `model-metadata.json` file to.
117 |
118 | **Example:**
119 |
120 | Download the metadata for model `$KAGGLE_DEVELOPER/test-model` into the `tmp` folder:
121 |
122 | ```bash
123 | kaggle models get -p tmp $KAGGLE_DEVELOPER/test-model
124 | ```
125 |
126 | **Purpose:**
127 |
128 | This command retrieves the metadata file for an existing model, which can be useful for inspection or as a basis for an update.
129 |
130 | ## `kaggle models update`
131 |
132 | Updates an existing model on Kaggle using a local `model-metadata.json` file.
133 |
134 | **Usage:**
135 |
136 | ```bash
137 | kaggle models update -p <folder>
138 | ```
139 |
140 | **Options:**
141 |
142 | * `-p, --path <folder>`: Path to the folder containing the `model-metadata.json` file with the updated information (defaults to the current directory).
143 |
144 | **Example:**
145 |
146 | Update the model whose details are in `tmp/model-metadata.json` (ensure the slug and owner in the JSON match an existing model):
147 |
148 | ```bash
149 | kaggle models update -p tmp
150 | ```
151 |
152 | **Purpose:**
153 |
154 | Use this command to change the metadata of an existing model, such as its title, description, or other fields defined in the `model-metadata.json` file.
155 |
156 | ## `kaggle models delete`
157 |
158 | Deletes a model from Kaggle.
159 |
160 | **Usage:**
161 |
162 | ```bash
163 | kaggle models delete <model> [options]
164 | ```
165 |
166 | **Arguments:**
167 |
168 | * `<model>`: Model URL suffix in the format `owner/model-slug` (e.g., `$KAGGLE_DEVELOPER/test-model`).
169 |
170 | **Options:**
171 |
172 | * `-y, --yes`: Automatically confirm deletion without prompting.
173 |
174 | **Example:**
175 |
176 | Delete the model `$KAGGLE_DEVELOPER/test-model` and automatically confirm:
177 |
178 | ```bash
179 | kaggle models delete $KAGGLE_DEVELOPER/test-model -y
180 | ```
181 |
182 | **Purpose:**
183 |
184 | This command permanently removes one of your models (and all its instances and versions) from Kaggle. Use with caution.
185 |
--------------------------------------------------------------------------------
/integration_tests/test_models.py:
--------------------------------------------------------------------------------
1 | import os
2 | import unittest
3 | from typing import List
4 |
5 | from kaggle.api.kaggle_api_extended import KaggleApi
6 |
7 | MODEL_HANDLE = "keras/bert"
8 | MODEL_ID = 2819
9 |
10 | # TODO(messick) Add a test that creates a dataset w/o specifying privacy that is created private.
11 |
12 |
13 | class TestModels(unittest.TestCase):
14 | def setUp(self):
15 | self.api = KaggleApi()
16 | self.api.authenticate()
17 |
18 | def test_list_models(self) -> None:
19 | models = self.api.model_list()
20 | self.assertGreater(len(models), 0)
21 |
22 | def test_get_model(self) -> None:
23 | model = self.api.model_get(MODEL_HANDLE)
24 | self.assertEqual(MODEL_ID, model.id)
25 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = [
3 | "hatchling",
4 | ]
5 | build-backend = "hatchling.build"
6 |
7 | [project]
8 | name = "kaggle"
9 | dynamic = [
10 | "version",
11 | ]
12 | description = "Access Kaggle resources anywhere"
13 | authors = [
14 | { name = "Kaggle", email = "support@kaggle.com" },
15 | ]
16 | license = { file = "LICENSE.txt" }
17 | readme = "README.md"
18 | classifiers = [
19 | "Programming Language :: Python :: 3",
20 | "License :: OSI Approved :: Apache Software License",
21 | "Operating System :: OS Independent",
22 | ]
23 | keywords = ["Kaggle", "API"]
24 | requires-python = ">=3.11"
25 | dependencies = [
26 | "bleach",
27 | "python-slugify",
28 | "requests",
29 | "setuptools>=21.0.0", # unused if setup.py is removed
30 | "six>=1.10", # should be unused but is still imported
31 | "tqdm",
32 | "urllib3>=1.15.1",
33 | "protobuf",
34 | "black>=24.10.0",
35 | "mypy>=1.15.0",
36 | # Pre-install `types-*` packages to speed up lint:typing command.
37 | "types-requests",
38 | "types-tqdm",
39 | ]
40 |
41 | [project.scripts]
42 | kaggle = "kaggle.cli:main"
43 |
44 | [project.urls]
45 | Homepage = "https://github.com/Kaggle/kaggle-api"
46 | Issues = "https://github.com/Kaggle/kaggle-api/issues"
47 |
48 | [tool.hatch.version]
49 | path = "src/kaggle/__init__.py"
50 |
51 | [tool.hatch.envs.default]
52 | dependencies = [
53 | "pytest",
54 | ]
55 |
56 | [tool.hatch.build.targets.wheel]
57 | packages = ["src/kaggle", "src/kagglesdk"]
58 |
59 | [tool.hatch.envs.default.scripts]
60 | install-unzip = """sudo apt-get install -y unzip || echo 'unzip could not be installed'"""
61 | # TODO: install in Mac/Windows
62 | install-black = """pip3 install black --break-system-packages || echo 'black could not be installed'"""
63 | install-toml = """sudo apt-get install -y python3-toml || echo 'toml could not be installed'"""
64 | install-deps = "hatch run install-unzip && hatch run install-black && hatch run install-toml"
65 |
66 | integration-test = "pytest {args:integration_tests}"
67 |
68 | compile = "./tools/GeneratePythonLibrary.sh"
69 | install = "./tools/GeneratePythonLibrary.sh --install"
70 | watch = "./tools/GeneratePythonLibrary.sh --watch"
71 | test = "./tools/GeneratePythonLibrary.sh --test local"
72 |
73 | [tool.docformatter]
74 | recursive = true
75 |
76 | [tool.black]
77 | target-version = ["py311"]
78 | line-length = 120
79 | skip-string-normalization = true
80 | extend-exclude = 'kagglesdk'
81 |
82 | [tool.mypy]
83 | strict = false
84 | show_error_codes = true
85 | follow_imports = 'silent'
86 | exclude = '''(?x)(
87 | /src/
88 | )'''
89 | python_version = 3.11
90 |
91 | # Start off with these
92 | warn_unused_configs = true
93 | warn_redundant_casts = true
94 | warn_unused_ignores = true
95 |
96 | # Getting these passing should be easy
97 | strict_equality = true
98 | extra_checks = true
99 |
100 | # Strongly recommend enabling this one as soon as you can
101 | check_untyped_defs = true
102 |
103 | # These shouldn't be too much additional work, but may be tricky to
104 | # get passing if you use a lot of untyped libraries
105 | disallow_subclassing_any = true
106 | disallow_untyped_decorators = true
107 | disallow_any_generics = true
108 |
109 | # These next few are various gradations of forcing use of type annotations
110 | disallow_incomplete_defs = true
111 | # TODO Enable these after GA.
112 | #disallow_untyped_calls = true # 167 errors reported a/o April 30, 2025
113 | #disallow_untyped_defs = true # 132
114 |
115 | # This one isn't too hard to get passing, but return on investment is lower
116 | no_implicit_reexport = true # 50
117 |
118 | # This one can be tricky to get passing if you use a lot of untyped libraries
119 | warn_return_any = true
120 |
--------------------------------------------------------------------------------
/requirements.in:
--------------------------------------------------------------------------------
1 | certifi >= 14.05.14
2 | six >= 1.10
3 | python_dateutil >= 2.5.3
4 | urllib3 >= 1.15.1
5 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | #
2 | # This file is autogenerated by pip-compile with Python 3.11
3 | # by the following command:
4 | #
5 | # pip-compile --allow-unsafe --generate-hashes requirements.in
6 | #
7 | certifi==2025.1.31 \
8 | --hash=sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651 \
9 | --hash=sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe
10 | # via -r requirements.in
11 | python-dateutil==2.9.0.post0 \
12 | --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \
13 | --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427
14 | # via -r requirements.in
15 | six==1.17.0 \
16 | --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \
17 | --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81
18 | # via
19 | # -r requirements.in
20 | # python-dateutil
21 | urllib3==2.3.0 \
22 | --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \
23 | --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d
24 | # via -r requirements.in
25 |
--------------------------------------------------------------------------------
/src/kaggle/__init__.py:
--------------------------------------------------------------------------------
1 | # coding=utf-8
2 | from __future__ import absolute_import
3 | from kaggle.api.kaggle_api_extended import KaggleApi
4 |
5 | __version__ = "1.7.5.0.dev0"
6 |
7 | api = KaggleApi()
8 | api.authenticate()
9 |
--------------------------------------------------------------------------------
/src/kaggle/api/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kaggle/api/__init__.py
--------------------------------------------------------------------------------
/src/kaggle/configuration.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # Copyright 2024 Kaggle Inc
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 |
17 | # coding: utf-8
18 | from __future__ import absolute_import
19 |
20 | import logging
21 | import six
22 | import sys
23 | import urllib3
24 | from typing import Dict, Union
25 |
26 |
class Configuration:
    """Runtime configuration for the Kaggle API client.

    Holds the endpoint/proxy settings, HTTP basic-auth credentials or API
    keys, and the logging setup shared by the generated client code.
    """

    def __init__(self) -> None:
        """Constructor."""
        # Default Base url (KAGGLE_API_ENDPOINT env var overrides production).
        self.host = _get_endpoint_from_env() or "https://www.kaggle.com/api/v1"
        self.proxy: Union[str, None] = None
        self.ssl_ca_cert: Union[str, None] = None

        # Authentication Settings
        # dict to store API key(s)
        self.api_key = {}
        # dict to store API prefix (e.g. Bearer)
        self.api_key_prefix = {}
        # Username for HTTP basic authentication
        self.username = ""
        # Password for HTTP basic authentication
        self.password = ""

        # Logging Settings
        self.logger = {"package_logger": logging.getLogger("kaggle"), "urllib3_logger": logging.getLogger("urllib3")}
        # Log format; assigning through the property also builds self.logger_formatter.
        self.logger_format = '%(asctime)s %(levelname)s %(message)s'
        # Log stream handler
        self.logger_stream_handler = None
        # Log file handler
        self.logger_file_handler = None
        # Debug file location; the property setter below installs handlers.
        self.logger_file = None
        # Debug switch; the property setter applies the log level.
        self.debug = False

    @property
    def logger_file(self):
        """The logger file.

        If the logger_file is None, then add stream handler and remove file
        handler. Otherwise, add file handler and remove stream handler.

        :return: The logger_file path, or None.
        :rtype: str
        """
        return self.__logger_file

    @logger_file.setter
    def logger_file(self, value) -> None:
        """The logger file.

        If the logger_file is None, then add stream handler and remove file
        handler. Otherwise, add file handler and remove stream handler.

        :param value: The logger_file path.
        :type: str
        """
        self.__logger_file = value
        if self.__logger_file:
            # A log file was given: route records to it and drop any
            # previously-installed stream handler.
            self.logger_file_handler = logging.FileHandler(self.__logger_file)
            self.logger_file_handler.setFormatter(self.logger_formatter)
            for logger in self.logger.values():
                logger.addHandler(self.logger_file_handler)
                if self.logger_stream_handler:
                    logger.removeHandler(self.logger_stream_handler)
        else:
            # No log file: fall back to a stream handler and drop any
            # previously-installed file handler.
            self.logger_stream_handler = logging.StreamHandler()
            self.logger_stream_handler.setFormatter(self.logger_formatter)
            for logger in self.logger.values():
                logger.addHandler(self.logger_stream_handler)
                if self.logger_file_handler:
                    logger.removeHandler(self.logger_file_handler)

    @property
    def debug(self):
        """Debug status.

        :return: The debug status, True or False.
        :rtype: bool
        """
        return self.__debug

    @debug.setter
    def debug(self, value) -> None:
        """Debug status.

        :param value: The debug status, True or False.
        :type: bool
        """
        self.__debug = value
        if self.__debug:
            # if debug status is True, turn on debug logging
            for logger in self.logger.values():
                logger.setLevel(logging.DEBUG)
        else:
            # if debug status is False, turn off debug logging,
            # setting log level to default `logging.WARNING`
            for logger in self.logger.values():
                logger.setLevel(logging.WARNING)

    @property
    def logger_format(self):
        """The logger format.

        The logger_formatter will be updated when sets logger_format.

        :return: The format string.
        :rtype: str
        """
        return self.__logger_format

    @logger_format.setter
    def logger_format(self, value) -> None:
        """The logger format.

        The logger_formatter will be updated when sets logger_format.

        :param value: The format string.
        :type: str
        """
        self.__logger_format = value
        self.logger_formatter = logging.Formatter(self.__logger_format)

    def get_api_key_with_prefix(self, identifier):
        """Gets API key (with prefix if set).

        :param identifier: The identifier of apiKey.
        :return: The token for api key authentication, or None when no key
            is configured for `identifier`.
        """
        if self.api_key.get(identifier) and self.api_key_prefix.get(identifier):
            return self.api_key_prefix[identifier] + ' ' + self.api_key[identifier]  # noqa: E501
        elif self.api_key.get(identifier):
            return self.api_key[identifier]

    def get_basic_auth_token(self):
        """Gets HTTP basic authentication header (string).

        :return: The token for basic HTTP authentication.
        """
        return urllib3.util.make_headers(basic_auth=self.username + ':' + self.password).get('authorization')

    def auth_settings(self) -> Dict[str, Dict[str, str]]:
        """Gets Auth Settings dict for api client.

        :return: The Auth Settings information dict.
        """
        return {
            'basicAuth': {
                'type': 'basic',
                'in': 'header',
                'key': 'Authorization',
                'value': self.get_basic_auth_token(),
            },
        }

    def to_debug_report(self) -> str:
        """Gets the essential information for debugging.

        :return: The report for debugging.
        """
        return (
            "Python SDK Debug Report:\n"
            "OS: {env}\n"
            "Python Version: {pyversion}\n"
            "Version of the API: 1\n"
            "SDK Package Version: 1".format(env=sys.platform, pyversion=sys.version)
        )
195 |
196 |
197 | def _get_endpoint_from_env() -> None:
198 | import os
199 |
200 | endpoint = os.environ.get("KAGGLE_API_ENDPOINT")
201 | if endpoint is None:
202 | return None
203 | endpoint = endpoint.rstrip("/")
204 | if endpoint.endswith("/api/v1"):
205 | return endpoint
206 | return endpoint + "/api/v1"
207 |
--------------------------------------------------------------------------------
/src/kaggle/models/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kaggle/models/__init__.py
--------------------------------------------------------------------------------
/src/kaggle/models/api_blob_type.py:
--------------------------------------------------------------------------------
class ApiBlobType:
    """String constants naming the destinations a blob upload may target."""

    DATASET = "dataset"
    MODEL = "model"
    INBOX = "inbox"
5 |
--------------------------------------------------------------------------------
/src/kaggle/models/dataset_column.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # Copyright 2024 Kaggle Inc
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 |
17 | # coding: utf-8
18 |
19 | import pprint
20 | import re # noqa: F401
21 |
22 | import six
23 |
24 |
class DatasetColumn(object):
    """Metadata describing a single column of a dataset file.

    Attributes:
        column_types (dict): The key is attribute name
            and the value is attribute type.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
    """

    column_types = {'order': 'float', 'name': 'str', 'type': 'str', 'original_type': 'str', 'description': 'str'}

    attribute_map = {
        'order': 'order',
        'name': 'name',
        'type': 'type',
        'original_type': 'originalType',
        'description': 'description',
    }

    def __init__(self, order=None, name=None, type=None, original_type=None, description=None):  # noqa: E501
        """DatasetColumn - a model defined in Swagger.

        :param order: 0-based position of the column.
        :param name: The column name.
        :param type: Kaggle type of the column's fields.
        :param original_type: Source type before conversion to a Kaggle type.
        :param description: Human-readable column description.
        """
        self._order = None
        self._name = None
        self._type = None
        self._original_type = None
        self._description = None
        self.discriminator = None

        if order is not None:
            self.order = order
        if name is not None:
            self.name = name
        if type is not None:
            self.type = type
        if original_type is not None:
            self.original_type = original_type
        if description is not None:
            self.description = description

    @property
    def order(self):
        """Gets the order of this DatasetColumn.

        The order that the column comes in, 0-based. (The first column is 0,
        second is 1, etc.)

        :return: The order of this DatasetColumn.
        :rtype: float
        """
        return self._order

    @order.setter
    def order(self, order):
        """Sets the order of this DatasetColumn.

        :param order: The 0-based order of this DatasetColumn.
        :type: float
        """

        self._order = order

    @property
    def name(self):
        """Gets the name of this DatasetColumn.

        :return: The column name.
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this DatasetColumn.

        :param name: The column name.
        :type: str
        """

        self._name = name

    @property
    def type(self):
        """Gets the type of this DatasetColumn.

        The type of all of the fields in the column. Please see the data
        types on
        https://github.com/Kaggle/kaggle-api/wiki/Dataset-Metadata

        :return: The type of this DatasetColumn.
        :rtype: str
        """
        return self._type

    @type.setter
    def type(self, type):
        """Sets the type of this DatasetColumn.

        :param type: The Kaggle type of all fields in the column.
        :type: str
        """

        self._type = type

    @property
    def original_type(self):
        """Gets the original_type of this DatasetColumn.

        Used to store the original type of the column, which will be converted
        to Kaggle's types. For example, an `originalType` of `"integer"` would
        convert to a `type` of `"numeric"`.

        :return: The original_type of this DatasetColumn.
        :rtype: str
        """
        return self._original_type

    @original_type.setter
    def original_type(self, original_type):
        """Sets the original_type of this DatasetColumn.

        :param original_type: The source type before Kaggle conversion.
        :type: str
        """

        self._original_type = original_type

    @property
    def description(self):
        """Gets the description of this DatasetColumn.

        :return: The description of the column.
        :rtype: str
        """
        return self._description

    @description.setter
    def description(self, description):
        """Sets the description of this DatasetColumn.

        :param description: The description of the column.
        :type: str
        """

        self._description = description

    def to_dict(self):
        """Returns the model properties as a dict."""
        result = {}

        for attr in self.column_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recursively serialize list elements that are themselves models.
                result[attr] = [item.to_dict() if hasattr(item, "to_dict") else item for item in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Recursively serialize dict values that are themselves models.
                result[attr] = {k: v.to_dict() if hasattr(v, "to_dict") else v for k, v in value.items()}
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal."""
        if not isinstance(other, DatasetColumn):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal."""
        return not self == other
229 |
--------------------------------------------------------------------------------
/src/kaggle/models/model_instance_new_version_request.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # Copyright 2024 Kaggle Inc
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 |
17 | # coding: utf-8
18 |
19 | import pprint
20 | import re # noqa: F401
21 |
22 | import six
23 |
24 | from kaggle.models.upload_file import UploadFile # noqa: F401,E501
25 |
26 |
class ModelInstanceNewVersionRequest(object):
    """Request payload for creating a new version of a model instance.

    Attributes:
        project_types (dict): The key is attribute name
            and the value is attribute type.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
    """

    project_types = {'version_notes': 'str', 'files': 'list[UploadFile]'}

    attribute_map = {'version_notes': 'versionNotes', 'files': 'files'}

    def __init__(self, version_notes=None, files=None):  # noqa: E501
        """ModelInstanceNewVersionRequest - a model defined in Swagger.

        :param version_notes: Optional notes describing this version.
        :param files: Required list of UploadFile objects.
        :raises ValueError: If `files` is None (it is a required field).
        """
        self._version_notes = None
        self._files = None
        self.discriminator = None

        if version_notes is not None:
            self.version_notes = version_notes
        # `files` is required: assigning through the property validates it.
        self.files = files

    @property
    def version_notes(self):
        """Gets the version_notes of this ModelInstanceNewVersionRequest.

        The version notes for the model instance version.

        :return: The version_notes of this ModelInstanceNewVersionRequest.
        :rtype: str
        """
        return self._version_notes

    @version_notes.setter
    def version_notes(self, version_notes):
        """Sets the version_notes of this ModelInstanceNewVersionRequest.

        :param version_notes: The version notes for the model instance version.
        :type: str
        """

        self._version_notes = version_notes

    @property
    def files(self):
        """Gets the files of this ModelInstanceNewVersionRequest.

        A list of files that should be associated with the model instance
        version.

        :return: The files of this ModelInstanceNewVersionRequest.
        :rtype: list[UploadFile]
        """
        return self._files

    @files.setter
    def files(self, files):
        """Sets the files of this ModelInstanceNewVersionRequest.

        :param files: The files of this ModelInstanceNewVersionRequest.
        :type: list[UploadFile]
        :raises ValueError: If `files` is None.
        """
        if files is None:
            raise ValueError("Invalid value for `files`, must not be `None`")  # noqa: E501

        self._files = files

    def to_dict(self):
        """Returns the model properties as a dict."""
        result = {}

        for attr in self.project_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recursively serialize list elements that are themselves models.
                result[attr] = [item.to_dict() if hasattr(item, "to_dict") else item for item in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Recursively serialize dict values that are themselves models.
                result[attr] = {k: v.to_dict() if hasattr(v, "to_dict") else v for k, v in value.items()}
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal."""
        if not isinstance(other, ModelInstanceNewVersionRequest):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal."""
        return not self == other
146 |
--------------------------------------------------------------------------------
/src/kaggle/models/start_blob_upload_response.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # Copyright 2024 Kaggle Inc
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 |
17 | # coding: utf-8
18 |
19 | import pprint
20 | import re # noqa: F401
21 |
22 | import six
23 |
24 |
class StartBlobUploadResponse(object):
    """Response returned when a blob upload slot has been reserved.

    Attributes:
        project_types (dict): The key is attribute name
            and the value is attribute type.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
    """

    project_types = {'token': 'str', 'create_url': 'str'}

    attribute_map = {'token': 'token', 'create_url': 'createUrl'}

    def __init__(self, token=None, create_url=None):  # noqa: E501
        """StartBlobUploadResponse - a model defined in Swagger.

        :param token: Required opaque token referencing the new blob/file.
        :param create_url: Required URL used to start the upload.
        :raises ValueError: If `token` or `create_url` is None.
        """
        self._token = None
        self._create_url = None
        self.discriminator = None

        # Both fields are required: the property setters validate them.
        self.token = token
        self.create_url = create_url

    @property
    def token(self):
        """Gets the token of this StartBlobUploadResponse.

        Opaque string token used to reference the new blob/file.

        :return: The token of this StartBlobUploadResponse.
        :rtype: str
        """
        return self._token

    @token.setter
    def token(self, token):
        """Sets the token of this StartBlobUploadResponse.

        :param token: Opaque string token used to reference the new blob/file.
        :type: str
        :raises ValueError: If `token` is None.
        """
        if token is None:
            raise ValueError("Invalid value for `token`, must not be `None`")  # noqa: E501

        self._token = token

    @property
    def create_url(self):
        """Gets the create_url of this StartBlobUploadResponse.

        URL to use to start the upload.

        :return: The create_url of this StartBlobUploadResponse.
        :rtype: str
        """
        return self._create_url

    @create_url.setter
    def create_url(self, create_url):
        """Sets the create_url of this StartBlobUploadResponse.

        :param create_url: URL to use to start the upload.
        :type: str
        :raises ValueError: If `create_url` is None.
        """
        if create_url is None:
            raise ValueError("Invalid value for `create_url`, must not be `None`")  # noqa: E501

        self._create_url = create_url

    def to_dict(self):
        """Returns the model properties as a dict."""
        result = {}

        for attr in self.project_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recursively serialize list elements that are themselves models.
                result[attr] = [item.to_dict() if hasattr(item, "to_dict") else item for item in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Recursively serialize dict values that are themselves models.
                result[attr] = {k: v.to_dict() if hasattr(v, "to_dict") else v for k, v in value.items()}
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal."""
        if not isinstance(other, StartBlobUploadResponse):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal."""
        return not self == other
143 |
--------------------------------------------------------------------------------
/src/kaggle/models/upload_file.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | #
3 | # Copyright 2024 Kaggle Inc
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 |
17 | # coding: utf-8
18 |
19 | import pprint
20 | import re # noqa: F401
21 |
22 | import six
23 |
24 | from kaggle.models.dataset_column import DatasetColumn # noqa: F401,E501
25 |
26 |
class UploadFile(object):
    """Describes one uploaded file and its optional column metadata.

    Attributes:
        column_types (dict): The key is attribute name
            and the value is attribute type.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
    """

    column_types = {'token': 'str', 'description': 'str', 'columns': 'list[DatasetColumn]'}

    attribute_map = {'token': 'token', 'description': 'description', 'columns': 'columns'}

    def __init__(self, token=None, description=None, columns=None):  # noqa: E501
        """UploadFile - a model defined in Swagger.

        :param token: Optional token referencing a specific file upload.
        :param description: Optional file description.
        :param columns: Optional list of DatasetColumn metadata.
        """
        self._token = None
        self._description = None
        self._columns = None
        self.discriminator = None

        if token is not None:
            self.token = token
        if description is not None:
            self.description = description
        if columns is not None:
            self.columns = columns

    @property
    def token(self):
        """Gets the token of this UploadFile.

        A token referencing a specific file upload that can be used across
        requests.

        :return: The token of this UploadFile.
        :rtype: str
        """
        return self._token

    @token.setter
    def token(self, token):
        """Sets the token of this UploadFile.

        :param token: The token of this UploadFile.
        :type: str
        """

        self._token = token

    @property
    def description(self):
        """Gets the description of this UploadFile.

        :return: The file description.
        :rtype: str
        """
        return self._description

    @description.setter
    def description(self, description):
        """Sets the description of this UploadFile.

        :param description: The file description.
        :type: str
        """

        self._description = description

    @property
    def columns(self):
        """Gets the columns of this UploadFile.

        :return: A list of dataset column metadata.
        :rtype: list[DatasetColumn]
        """
        return self._columns

    @columns.setter
    def columns(self, columns):
        """Sets the columns of this UploadFile.

        :param columns: A list of dataset column metadata.
        :type: list[DatasetColumn]
        """

        self._columns = columns

    def to_dict(self):
        """Returns the model properties as a dict."""
        result = {}

        for attr in self.column_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recursively serialize list elements that are themselves models.
                result[attr] = [item.to_dict() if hasattr(item, "to_dict") else item for item in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Recursively serialize dict values that are themselves models.
                result[attr] = {k: v.to_dict() if hasattr(v, "to_dict") else v for k, v in value.items()}
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal."""
        if not isinstance(other, UploadFile):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal."""
        return not self == other
166 |
--------------------------------------------------------------------------------
/src/kaggle/test/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kaggle/test/__init__.py
--------------------------------------------------------------------------------
/src/kaggle/test/test_authenticate.py:
--------------------------------------------------------------------------------
1 | from kaggle.api.kaggle_api_extended import KaggleApi
2 |
3 | # python -m unittest tests.test_authenticate
4 |
5 | import os
6 | import unittest
7 |
8 |
class TestAuthenticate(unittest.TestCase):
    """Exercises KaggleApi credential loading and configuration helpers."""

    def setUp(self):
        print("setup class:%s" % self)

    def tearDown(self):
        print("teardown class:TestStuff")

    # Environment

    def test_environment_variables(self):
        """Credentials exported in the environment are picked up by authenticate()."""
        os.environ['KAGGLE_USERNAME'] = 'dinosaur'
        os.environ['KAGGLE_KEY'] = 'xxxxxxxxxxxx'
        api = KaggleApi()

        # Nothing should be loaded before authenticate() runs.
        self.assertNotIn("key", api.config_values)
        self.assertNotIn("username", api.config_values)

        api.authenticate()

        # The values exported above must now be visible in the config.
        self.assertEqual('xxxxxxxxxxxx', api.config_values['key'])
        self.assertEqual('dinosaur', api.config_values['username'])

    # Configuration Actions

    def test_config_actions(self):
        """Configuration helpers behave sanely without prior authentication."""
        api = KaggleApi()

        self.assertTrue(api.config_dir.endswith('kaggle'))
        self.assertIsNone(api.get_config_value('doesntexist'))


if __name__ == '__main__':
    unittest.main()
44 |
--------------------------------------------------------------------------------
/src/kagglesdk/__init__.py:
--------------------------------------------------------------------------------
1 | from kagglesdk.kaggle_client import KaggleClient
2 | from kagglesdk.kaggle_env import KaggleEnv
3 |
--------------------------------------------------------------------------------
/src/kagglesdk/admin/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/admin/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/admin/services/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/admin/services/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/admin/services/inbox_file_service.py:
--------------------------------------------------------------------------------
1 | from kagglesdk.admin.types.inbox_file_service import CreateInboxFileRequest, CreateInboxFileResponse
2 | from kagglesdk.kaggle_http_client import KaggleHttpClient
3 |
class InboxFileClient(object):
  """File drop/pickup functionality."""

  def __init__(self, client: KaggleHttpClient):
    self._client = client

  def create_inbox_file(self, request: CreateInboxFileRequest = None) -> CreateInboxFileResponse:
    r"""
    Creates (aka 'drops') a new file into the inbox.

    Args:
      request (CreateInboxFileRequest):
        The request object; initialized to empty instance if not specified.
    """

    # Substitute an empty request when the caller supplied none.
    effective_request = request if request is not None else CreateInboxFileRequest()

    return self._client.call("admin.InboxFileService", "CreateInboxFile", effective_request, CreateInboxFileResponse)
23 |
--------------------------------------------------------------------------------
/src/kagglesdk/admin/types/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/admin/types/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/admin/types/inbox_file_service.py:
--------------------------------------------------------------------------------
1 | from kagglesdk.kaggle_object import *
2 |
class CreateInboxFileRequest(KaggleObject):
  r"""
  Request for creating (dropping) a file into the admin inbox.

  Attributes:
    virtual_directory (str)
      Directory name used for tagging the uploaded file.
    blob_file_token (str)
      Token representing the uploaded file.
  """

  def __init__(self):
    self._virtual_directory = ""
    self._blob_file_token = ""
    # NOTE(review): _freeze() is inherited from KaggleObject — presumably it
    # locks the attribute set after construction; confirm in kaggle_object.py.
    self._freeze()

  @property
  def virtual_directory(self) -> str:
    """Directory name used for tagging the uploaded file."""
    return self._virtual_directory

  @virtual_directory.setter
  def virtual_directory(self, virtual_directory: str):
    # Assigning None resets the field; the deletion path relies on behavior
    # provided by the KaggleObject base class — NOTE(review): confirm that
    # the base defines deleter/__delattr__ support for managed fields.
    if virtual_directory is None:
      del self.virtual_directory
      return
    if not isinstance(virtual_directory, str):
      raise TypeError('virtual_directory must be of type str')
    self._virtual_directory = virtual_directory

  @property
  def blob_file_token(self) -> str:
    """Token representing the uploaded file."""
    return self._blob_file_token

  @blob_file_token.setter
  def blob_file_token(self, blob_file_token: str):
    # Same None-resets-field convention as virtual_directory above.
    if blob_file_token is None:
      del self.blob_file_token
      return
    if not isinstance(blob_file_token, str):
      raise TypeError('blob_file_token must be of type str')
    self._blob_file_token = blob_file_token

  def endpoint(self):
    # REST path for this request; format_map substitutes any path parameters
    # from the serialized field map (this path has none).
    path = '/api/v1/inbox/files/create'
    return path.format_map(self.to_field_map(self))


  @staticmethod
  def method():
    # HTTP verb used when issuing this request.
    return 'POST'

  @staticmethod
  def body_fields():
    # '*' means all fields are serialized into the request body.
    return '*'
57 |
58 |
class CreateInboxFileResponse(KaggleObject):
  r"""
  Empty response body for CreateInboxFile.

  NOTE: This is sent to non-admins, so we're intentionally *NOT* sending back
  the full InboxFile (with its URL for a direct download).

  """

  # No fields beyond what KaggleObject provides.
  pass
67 |
# Serialization metadata: maps camelCase wire names to the snake_case
# properties / private attributes declared on the classes above. Assigned
# after the class bodies because the classes must exist first.
CreateInboxFileRequest._fields = [
  FieldMetadata("virtualDirectory", "virtual_directory", "_virtual_directory", str, "", PredefinedSerializer()),
  FieldMetadata("blobFileToken", "blob_file_token", "_blob_file_token", str, "", PredefinedSerializer()),
]

CreateInboxFileResponse._fields = []
74 |
75 |
--------------------------------------------------------------------------------
/src/kagglesdk/blobs/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/blobs/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/blobs/services/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/blobs/services/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/blobs/services/blob_api_service.py:
--------------------------------------------------------------------------------
1 | from kagglesdk.blobs.types.blob_api_service import ApiStartBlobUploadRequest, ApiStartBlobUploadResponse
2 | from kagglesdk.kaggle_http_client import KaggleHttpClient
3 |
class BlobApiClient(object):
  r"""
  Client for the Binary Large OBject (BLOB) service, which uploads files to
  Google Cloud Storage (GCS).
  """

  def __init__(self, client: KaggleHttpClient):
    self._client = client

  def start_blob_upload(self, request: ApiStartBlobUploadRequest = None) -> ApiStartBlobUploadResponse:
    r"""
    Starts a blob upload (i.e. reserves a spot for the upload on GCS).

    Args:
      request (ApiStartBlobUploadRequest):
        The request object; defaults to an empty instance when omitted.
    """
    req = request if request is not None else ApiStartBlobUploadRequest()
    return self._client.call(
        "blobs.BlobApiService", "ApiStartBlobUpload", req,
        ApiStartBlobUploadResponse)
26 |
--------------------------------------------------------------------------------
/src/kagglesdk/blobs/types/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/blobs/types/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/blobs/types/blob_api_service.py:
--------------------------------------------------------------------------------
1 | import enum
2 | from kagglesdk.kaggle_object import *
3 | from typing import Optional
4 |
class ApiBlobType(enum.Enum):
  # Destination category for an uploaded blob. Numeric values look
  # proto-generated (serialized by EnumSerializer); keep them stable.
  API_BLOB_TYPE_UNSPECIFIED = 0
  DATASET = 1
  MODEL = 2
  INBOX = 3
10 |
class ApiStartBlobUploadRequest(KaggleObject):
  r"""
  Request payload for the `/api/v1/blobs/upload` endpoint.

  Attributes:
    type (ApiBlobType)
      The type of the blob.
    name (str)
      Name (e.g. file name) of the blob.
    content_type (str)
      Content/MIME type (e.g. 'text/plain').
    content_length (int)
      Size in bytes of the blob.
    last_modified_epoch_seconds (int)
      Optional user-reported time when the blob was last updated/modified.
  """

  def __init__(self):
    # Optional fields start as None (unset); required fields get their
    # zero/default wire values.
    self._type = ApiBlobType.API_BLOB_TYPE_UNSPECIFIED
    self._name = ""
    self._content_type = None
    self._content_length = 0
    self._last_modified_epoch_seconds = None
    # Provided by KaggleObject; presumably prevents adding new attributes
    # after construction -- confirm against the base class.
    self._freeze()

  @property
  def type(self) -> 'ApiBlobType':
    """The type of the blob."""
    return self._type

  @type.setter
  def type(self, type: 'ApiBlobType'):
    # Assigning None clears the field; `del` on the property is handled by
    # KaggleObject (no fdel is defined here).
    if type is None:
      del self.type
      return
    if not isinstance(type, ApiBlobType):
      raise TypeError('type must be of type ApiBlobType')
    self._type = type

  @property
  def name(self) -> str:
    """Name (e.g. file name) of the blob."""
    return self._name

  @name.setter
  def name(self, name: str):
    if name is None:
      del self.name
      return
    if not isinstance(name, str):
      raise TypeError('name must be of type str')
    self._name = name

  @property
  def content_type(self) -> str:
    """Content/MIME type (e.g. 'text/plain'). Returns '' when unset."""
    return self._content_type or ""

  @content_type.setter
  def content_type(self, content_type: Optional[str]):
    if content_type is None:
      del self.content_type
      return
    if not isinstance(content_type, str):
      raise TypeError('content_type must be of type str')
    self._content_type = content_type

  @property
  def content_length(self) -> int:
    """Size in bytes of the blob."""
    return self._content_length

  @content_length.setter
  def content_length(self, content_length: int):
    if content_length is None:
      del self.content_length
      return
    if not isinstance(content_length, int):
      raise TypeError('content_length must be of type int')
    self._content_length = content_length

  @property
  def last_modified_epoch_seconds(self) -> int:
    """Optional user-reported time when the blob was last updated/modified.
    Returns 0 when unset."""
    return self._last_modified_epoch_seconds or 0

  @last_modified_epoch_seconds.setter
  def last_modified_epoch_seconds(self, last_modified_epoch_seconds: Optional[int]):
    if last_modified_epoch_seconds is None:
      del self.last_modified_epoch_seconds
      return
    if not isinstance(last_modified_epoch_seconds, int):
      raise TypeError('last_modified_epoch_seconds must be of type int')
    self._last_modified_epoch_seconds = last_modified_epoch_seconds

  def endpoint(self):
    # REST path for this RPC; format_map is a no-op here since the template
    # contains no placeholders.
    path = '/api/v1/blobs/upload'
    return path.format_map(self.to_field_map(self))


  @staticmethod
  def method():
    # HTTP verb used when calling `endpoint()`.
    return 'POST'

  @staticmethod
  def body_fields():
    # '*' means all fields are serialized into the request body.
    return '*'
116 |
117 |
class ApiStartBlobUploadResponse(KaggleObject):
  r"""
  Response from starting a blob upload.

  Attributes:
    token (str)
      Opaque string token used to reference the new blob/file.
    create_url (str)
      URL to use to start the upload.
  """

  def __init__(self):
    self._token = ""
    self._create_url = ""
    self._freeze()

  @property
  def token(self) -> str:
    """Opaque string token used to reference the new blob/file."""
    return self._token

  @token.setter
  def token(self, token: str):
    # Assigning None clears the field (delete handled by KaggleObject).
    if token is None:
      del self.token
      return
    if not isinstance(token, str):
      raise TypeError('token must be of type str')
    self._token = token

  @property
  def create_url(self) -> str:
    """URL to use to start the upload."""
    return self._create_url

  @create_url.setter
  def create_url(self, create_url: str):
    if create_url is None:
      del self.create_url
      return
    if not isinstance(create_url, str):
      raise TypeError('create_url must be of type str')
    self._create_url = create_url

  @property
  def createUrl(self):
    # Backward-compatible camelCase alias for `create_url`.
    return self.create_url
163 |
164 |
# Serialization metadata mapping camelCase wire names to the attributes
# declared above. Fields whose default is None carry optional=True.
ApiStartBlobUploadRequest._fields = [
  FieldMetadata("type", "type", "_type", ApiBlobType, ApiBlobType.API_BLOB_TYPE_UNSPECIFIED, EnumSerializer()),
  FieldMetadata("name", "name", "_name", str, "", PredefinedSerializer()),
  FieldMetadata("contentType", "content_type", "_content_type", str, None, PredefinedSerializer(), optional=True),
  FieldMetadata("contentLength", "content_length", "_content_length", int, 0, PredefinedSerializer()),
  FieldMetadata("lastModifiedEpochSeconds", "last_modified_epoch_seconds", "_last_modified_epoch_seconds", int, None, PredefinedSerializer(), optional=True),
]

ApiStartBlobUploadResponse._fields = [
  FieldMetadata("token", "token", "_token", str, "", PredefinedSerializer()),
  FieldMetadata("createUrl", "create_url", "_create_url", str, "", PredefinedSerializer()),
]
177 |
178 |
--------------------------------------------------------------------------------
/src/kagglesdk/common/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/common/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/common/types/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/common/types/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/common/types/file_download.py:
--------------------------------------------------------------------------------
1 | from kagglesdk.kaggle_object import *
2 | from typing import Optional
3 |
class FileDownload(KaggleObject):
  r"""
  Standard response object representing a file download.
  See http://go/kaggle-proto-handler-file-downloads
  Some field names/descriptions borrowed from
  google3/gdata/rosy/proto/data.proto

  Attributes:
    content_type (str)
      MIME type of the data
      TODO(aip.dev/143): (-- api-linter: core::0143::standardized-codes=disabled
      --)
    file_name (str)
      Original file name
    token (str)
      A unique fingerprint for the file/media data
    content_length (int)
      Size of the data, in bytes (if known)
  """

  def __init__(self):
    self._content_type = ""
    self._file_name = ""
    self._token = ""
    # None means "size unknown"; the getter maps it to 0.
    self._content_length = None
    # Provided by KaggleObject; presumably prevents adding new attributes
    # after construction -- confirm against the base class.
    self._freeze()

  @property
  def content_type(self) -> str:
    r"""
    MIME type of the data
    TODO(aip.dev/143): (-- api-linter: core::0143::standardized-codes=disabled
    --)
    """
    return self._content_type

  @content_type.setter
  def content_type(self, content_type: str):
    # Assigning None clears the field; `del` on the property is handled by
    # KaggleObject (no fdel is defined here).
    if content_type is None:
      del self.content_type
      return
    if not isinstance(content_type, str):
      raise TypeError('content_type must be of type str')
    self._content_type = content_type

  @property
  def file_name(self) -> str:
    """Original file name"""
    return self._file_name

  @file_name.setter
  def file_name(self, file_name: str):
    if file_name is None:
      del self.file_name
      return
    if not isinstance(file_name, str):
      raise TypeError('file_name must be of type str')
    self._file_name = file_name

  @property
  def token(self) -> str:
    """A unique fingerprint for the file/media data"""
    return self._token

  @token.setter
  def token(self, token: str):
    if token is None:
      del self.token
      return
    if not isinstance(token, str):
      raise TypeError('token must be of type str')
    self._token = token

  @property
  def content_length(self) -> int:
    """Size of the data, in bytes (if known). Returns 0 when unknown."""
    return self._content_length or 0

  @content_length.setter
  def content_length(self, content_length: Optional[int]):
    if content_length is None:
      del self.content_length
      return
    if not isinstance(content_length, int):
      raise TypeError('content_length must be of type int')
    self._content_length = content_length

  @classmethod
  def prepare_from(cls, http_response):
    # Returns the raw HTTP response untouched -- presumably the caller
    # streams the file payload directly rather than deserializing it into
    # this object. TODO(review): confirm against KaggleObject's callers.
    return http_response
94 |
95 |
# Serialization metadata mapping camelCase wire names to the attributes
# declared above; contentLength is optional (None = size unknown).
FileDownload._fields = [
  FieldMetadata("contentType", "content_type", "_content_type", str, "", PredefinedSerializer()),
  FieldMetadata("fileName", "file_name", "_file_name", str, "", PredefinedSerializer()),
  FieldMetadata("token", "token", "_token", str, "", PredefinedSerializer()),
  FieldMetadata("contentLength", "content_length", "_content_length", int, None, PredefinedSerializer(), optional=True),
]
102 |
103 |
--------------------------------------------------------------------------------
/src/kagglesdk/common/types/http_redirect.py:
--------------------------------------------------------------------------------
1 | from datetime import timedelta
2 | from kagglesdk.kaggle_object import *
3 | from typing import Optional
4 |
class HttpRedirect(KaggleObject):
  r"""
  Represents an HTTP redirect (e.g. 301 or 302) response.
  Patterned after ASP.NET MVC's RedirectResult.

  Attributes:
    url (str)
      Destination URL for the redirect.
    permanent (bool)
      Should it be an HTTP 301 (permanent) redirect or just temporary (HTTP
      302)?.
    bypass_encoding (bool)
      When `true`, the `url` is already encoded, so bypass `UriHelper.Encode`.
      Otherwise, invoke `UriHelper.Encode` on the `url` before returning to the
      client.
    expiry (timedelta)
      Specifies how long the redirected url can be cached.
  """

  def __init__(self):
    self._url = ""
    self._permanent = False
    # Optional fields start as None (unset).
    self._bypass_encoding = None
    self._expiry = None
    # Provided by KaggleObject; presumably prevents adding new attributes
    # after construction -- confirm against the base class.
    self._freeze()

  @property
  def url(self) -> str:
    """Destination URL for the redirect."""
    return self._url

  @url.setter
  def url(self, url: str):
    # Assigning None clears the field; `del` on the property is handled by
    # KaggleObject (no fdel is defined here).
    if url is None:
      del self.url
      return
    if not isinstance(url, str):
      raise TypeError('url must be of type str')
    self._url = url

  @property
  def permanent(self) -> bool:
    r"""
    Should it be an HTTP 301 (permanent) redirect or just temporary (HTTP
    302)?.
    """
    return self._permanent

  @permanent.setter
  def permanent(self, permanent: bool):
    if permanent is None:
      del self.permanent
      return
    if not isinstance(permanent, bool):
      raise TypeError('permanent must be of type bool')
    self._permanent = permanent

  @property
  def bypass_encoding(self) -> bool:
    r"""
    When `true`, the `url` is already encoded, so bypass `UriHelper.Encode`.
    Otherwise, invoke `UriHelper.Encode` on the `url` before returning to the
    client. Returns False when unset.
    """
    return self._bypass_encoding or False

  @bypass_encoding.setter
  def bypass_encoding(self, bypass_encoding: Optional[bool]):
    if bypass_encoding is None:
      del self.bypass_encoding
      return
    if not isinstance(bypass_encoding, bool):
      raise TypeError('bypass_encoding must be of type bool')
    self._bypass_encoding = bypass_encoding

  @property
  def expiry(self) -> Optional[timedelta]:
    """Specifies how long the redirected url can be cached. None when unset."""
    return self._expiry

  @expiry.setter
  def expiry(self, expiry: Optional[timedelta]):
    if expiry is None:
      del self.expiry
      return
    if not isinstance(expiry, timedelta):
      raise TypeError('expiry must be of type timedelta')
    self._expiry = expiry

  @classmethod
  def prepare_from(cls, http_response):
    # Returns the raw HTTP response untouched -- presumably the caller
    # follows the redirect itself. TODO(review): confirm against callers.
    return http_response
97 |
98 |
# Serialization metadata mapping camelCase wire names to the attributes
# declared above; expiry uses TimeDeltaSerializer for the duration encoding.
HttpRedirect._fields = [
  FieldMetadata("url", "url", "_url", str, "", PredefinedSerializer()),
  FieldMetadata("permanent", "permanent", "_permanent", bool, False, PredefinedSerializer()),
  FieldMetadata("bypassEncoding", "bypass_encoding", "_bypass_encoding", bool, None, PredefinedSerializer(), optional=True),
  FieldMetadata("expiry", "expiry", "_expiry", timedelta, None, TimeDeltaSerializer()),
]
105 |
106 |
--------------------------------------------------------------------------------
/src/kagglesdk/competitions/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/competitions/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/competitions/services/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/competitions/services/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/competitions/services/competition_api_service.py:
--------------------------------------------------------------------------------
1 | from kagglesdk.common.types.file_download import FileDownload
2 | from kagglesdk.common.types.http_redirect import HttpRedirect
3 | from kagglesdk.competitions.types.competition_api_service import ApiCreateCodeSubmissionRequest, ApiCreateCodeSubmissionResponse, ApiCreateSubmissionRequest, ApiCreateSubmissionResponse, ApiDownloadDataFileRequest, ApiDownloadDataFilesRequest, ApiDownloadLeaderboardRequest, ApiGetLeaderboardRequest, ApiGetLeaderboardResponse, ApiGetSubmissionRequest, ApiListCompetitionsRequest, ApiListCompetitionsResponse, ApiListDataFilesRequest, ApiListDataFilesResponse, ApiListSubmissionsRequest, ApiListSubmissionsResponse, ApiStartSubmissionUploadRequest, ApiStartSubmissionUploadResponse, ApiSubmission
4 | from kagglesdk.kaggle_http_client import KaggleHttpClient
5 |
class CompetitionApiClient(object):
  r"""Typed wrapper around the `competitions.CompetitionApiService` RPC surface."""

  _SERVICE_NAME = "competitions.CompetitionApiService"

  def __init__(self, client: KaggleHttpClient):
    self._client = client

  def _invoke(self, rpc_name, request, request_cls, response_cls):
    # Shared dispatch: substitute an empty request when none was given,
    # then forward the call to the underlying HTTP client.
    if request is None:
      request = request_cls()
    return self._client.call(self._SERVICE_NAME, rpc_name, request, response_cls)

  def list_competitions(self, request: ApiListCompetitionsRequest = None) -> ApiListCompetitionsResponse:
    r"""`request` defaults to an empty ApiListCompetitionsRequest."""
    return self._invoke("ApiListCompetitions", request, ApiListCompetitionsRequest, ApiListCompetitionsResponse)

  def list_submissions(self, request: ApiListSubmissionsRequest = None) -> ApiListSubmissionsResponse:
    r"""`request` defaults to an empty ApiListSubmissionsRequest."""
    return self._invoke("ApiListSubmissions", request, ApiListSubmissionsRequest, ApiListSubmissionsResponse)

  def list_data_files(self, request: ApiListDataFilesRequest = None) -> ApiListDataFilesResponse:
    r"""`request` defaults to an empty ApiListDataFilesRequest."""
    return self._invoke("ApiListDataFiles", request, ApiListDataFilesRequest, ApiListDataFilesResponse)

  def get_leaderboard(self, request: ApiGetLeaderboardRequest = None) -> ApiGetLeaderboardResponse:
    r"""`request` defaults to an empty ApiGetLeaderboardRequest."""
    return self._invoke("ApiGetLeaderboard", request, ApiGetLeaderboardRequest, ApiGetLeaderboardResponse)

  def download_leaderboard(self, request: ApiDownloadLeaderboardRequest = None) -> FileDownload:
    r"""`request` defaults to an empty ApiDownloadLeaderboardRequest."""
    return self._invoke("ApiDownloadLeaderboard", request, ApiDownloadLeaderboardRequest, FileDownload)

  def create_submission(self, request: ApiCreateSubmissionRequest = None) -> ApiCreateSubmissionResponse:
    r"""`request` defaults to an empty ApiCreateSubmissionRequest."""
    return self._invoke("ApiCreateSubmission", request, ApiCreateSubmissionRequest, ApiCreateSubmissionResponse)

  def create_code_submission(self, request: ApiCreateCodeSubmissionRequest = None) -> ApiCreateCodeSubmissionResponse:
    r"""`request` defaults to an empty ApiCreateCodeSubmissionRequest."""
    return self._invoke("ApiCreateCodeSubmission", request, ApiCreateCodeSubmissionRequest, ApiCreateCodeSubmissionResponse)

  def get_submission(self, request: ApiGetSubmissionRequest = None) -> ApiSubmission:
    r"""`request` defaults to an empty ApiGetSubmissionRequest."""
    return self._invoke("ApiGetSubmission", request, ApiGetSubmissionRequest, ApiSubmission)

  def start_submission_upload(self, request: ApiStartSubmissionUploadRequest = None) -> ApiStartSubmissionUploadResponse:
    r"""`request` defaults to an empty ApiStartSubmissionUploadRequest."""
    return self._invoke("ApiStartSubmissionUpload", request, ApiStartSubmissionUploadRequest, ApiStartSubmissionUploadResponse)

  def download_data_files(self, request: ApiDownloadDataFilesRequest = None) -> HttpRedirect:
    r"""`request` defaults to an empty ApiDownloadDataFilesRequest."""
    return self._invoke("ApiDownloadDataFiles", request, ApiDownloadDataFilesRequest, HttpRedirect)

  def download_data_file(self, request: ApiDownloadDataFileRequest = None) -> HttpRedirect:
    r"""`request` defaults to an empty ApiDownloadDataFileRequest."""
    return self._invoke("ApiDownloadDataFile", request, ApiDownloadDataFileRequest, HttpRedirect)
142 |
--------------------------------------------------------------------------------
/src/kagglesdk/competitions/types/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/competitions/types/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/competitions/types/competition_enums.py:
--------------------------------------------------------------------------------
1 | import enum
2 |
class CompetitionListTab(enum.Enum):
  # Tab filter for competition listings. Numeric values look proto-generated;
  # keep them stable -- they are the serialized wire values.
  COMPETITION_LIST_TAB_GENERAL = 0
  """TODO(aip.dev/126): (-- api-linter: core::0126::unspecified=disabled --)"""
  COMPETITION_LIST_TAB_ENTERED = 1
  COMPETITION_LIST_TAB_COMMUNITY = 2
  COMPETITION_LIST_TAB_HOSTED = 3
  COMPETITION_LIST_TAB_UNLAUNCHED = 4
  COMPETITION_LIST_TAB_UNLAUNCHED_COMMUNITY = 5
  COMPETITION_LIST_TAB_EVERYTHING = 6
12 |
class CompetitionSortBy(enum.Enum):
  # Sort order for competition listings. Numeric values look proto-generated;
  # keep them stable -- they are the serialized wire values.
  COMPETITION_SORT_BY_GROUPED = 0
  """TODO(aip.dev/126): (-- api-linter: core::0126::unspecified=disabled --)"""
  COMPETITION_SORT_BY_BEST = 1
  COMPETITION_SORT_BY_PRIZE = 2
  COMPETITION_SORT_BY_EARLIEST_DEADLINE = 3
  COMPETITION_SORT_BY_LATEST_DEADLINE = 4
  COMPETITION_SORT_BY_NUMBER_OF_TEAMS = 5
  COMPETITION_SORT_BY_RELEVANCE = 6
  COMPETITION_SORT_BY_RECENTLY_CREATED = 7
23 |
class HostSegment(enum.Enum):
  r"""
  NOTE: Keep in Sync with Kaggle.Entities.HostSegment until migrated! Also keep
  the comment in
  competition_service.ListCompetitionsRequest.Selector.host_segment_id_filter
  up to date
  """
  # Values are intentionally non-contiguous: they mirror the legacy
  # Kaggle.Entities.HostSegment IDs (see docstring above).
  HOST_SEGMENT_UNSPECIFIED = 0
  HOST_SEGMENT_FEATURED = 1
  HOST_SEGMENT_GETTING_STARTED = 5
  HOST_SEGMENT_MASTERS = 6
  HOST_SEGMENT_PLAYGROUND = 8
  HOST_SEGMENT_RECRUITMENT = 3
  HOST_SEGMENT_RESEARCH = 2
  HOST_SEGMENT_COMMUNITY = 10
  HOST_SEGMENT_ANALYTICS = 11
40 |
class SubmissionGroup(enum.Enum):
  # Filter for which submissions to include in listings; values are the
  # serialized wire values -- keep them stable.
  SUBMISSION_GROUP_ALL = 0
  """TODO(aip.dev/126): (-- api-linter: core::0126::unspecified=disabled --)"""
  SUBMISSION_GROUP_SUCCESSFUL = 1
  SUBMISSION_GROUP_SELECTED = 2
46 |
class SubmissionSortBy(enum.Enum):
  # Sort order for submission listings; values are the serialized wire
  # values -- keep them stable.
  SUBMISSION_SORT_BY_DATE = 0
  """TODO(aip.dev/126): (-- api-linter: core::0126::unspecified=disabled --)"""
  SUBMISSION_SORT_BY_NAME = 1
  SUBMISSION_SORT_BY_PRIVATE_SCORE = 2
  SUBMISSION_SORT_BY_PUBLIC_SCORE = 3
53 |
54 |
--------------------------------------------------------------------------------
/src/kagglesdk/competitions/types/submission_status.py:
--------------------------------------------------------------------------------
1 | import enum
2 |
class SubmissionStatus(enum.Enum):
  """TODO(aip.dev/216): (-- api-linter: core::0216::synonyms=disabled --)"""
  # Scoring state of a competition submission; values are the serialized
  # wire values -- keep them stable.
  PENDING = 0
  """TODO(aip.dev/126): (-- api-linter: core::0126::unspecified=disabled --)"""
  COMPLETE = 1
  ERROR = 2
9 |
10 |
--------------------------------------------------------------------------------
/src/kagglesdk/datasets/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/datasets/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/datasets/services/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/datasets/services/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/datasets/services/dataset_api_service.py:
--------------------------------------------------------------------------------
1 | from kagglesdk.common.types.http_redirect import HttpRedirect
2 | from kagglesdk.datasets.types.dataset_api_service import ApiCreateDatasetRequest, ApiCreateDatasetResponse, ApiCreateDatasetVersionByIdRequest, ApiCreateDatasetVersionRequest, ApiDataset, ApiDeleteDatasetRequest, ApiDeleteDatasetResponse, ApiDownloadDatasetRawRequest, ApiDownloadDatasetRequest, ApiGetDatasetMetadataRequest, ApiGetDatasetMetadataResponse, ApiGetDatasetRequest, ApiGetDatasetStatusRequest, ApiGetDatasetStatusResponse, ApiListDatasetFilesRequest, ApiListDatasetFilesResponse, ApiListDatasetsRequest, ApiListDatasetsResponse, ApiUpdateDatasetMetadataRequest, ApiUpdateDatasetMetadataResponse, ApiUploadDatasetFileRequest, ApiUploadDatasetFileResponse
3 | from kagglesdk.kaggle_http_client import KaggleHttpClient
4 |
class DatasetApiClient(object):
  r"""Typed wrapper around the `datasets.DatasetApiService` RPC surface."""

  _SERVICE_NAME = "datasets.DatasetApiService"

  def __init__(self, client: KaggleHttpClient):
    self._client = client

  def _invoke(self, rpc_name, request, request_cls, response_cls):
    # Shared dispatch: substitute an empty request when none was given,
    # then forward the call to the underlying HTTP client.
    if request is None:
      request = request_cls()
    return self._client.call(self._SERVICE_NAME, rpc_name, request, response_cls)

  def list_datasets(self, request: ApiListDatasetsRequest = None) -> ApiListDatasetsResponse:
    r"""`request` defaults to an empty ApiListDatasetsRequest."""
    return self._invoke("ApiListDatasets", request, ApiListDatasetsRequest, ApiListDatasetsResponse)

  def get_dataset(self, request: ApiGetDatasetRequest = None) -> ApiDataset:
    r"""`request` defaults to an empty ApiGetDatasetRequest."""
    return self._invoke("ApiGetDataset", request, ApiGetDatasetRequest, ApiDataset)

  def list_dataset_files(self, request: ApiListDatasetFilesRequest = None) -> ApiListDatasetFilesResponse:
    r"""`request` defaults to an empty ApiListDatasetFilesRequest."""
    return self._invoke("ApiListDatasetFiles", request, ApiListDatasetFilesRequest, ApiListDatasetFilesResponse)

  def get_dataset_metadata(self, request: ApiGetDatasetMetadataRequest = None) -> ApiGetDatasetMetadataResponse:
    r"""`request` defaults to an empty ApiGetDatasetMetadataRequest."""
    return self._invoke("ApiGetDatasetMetadata", request, ApiGetDatasetMetadataRequest, ApiGetDatasetMetadataResponse)

  def update_dataset_metadata(self, request: ApiUpdateDatasetMetadataRequest = None) -> ApiUpdateDatasetMetadataResponse:
    r"""`request` defaults to an empty ApiUpdateDatasetMetadataRequest."""
    return self._invoke("ApiUpdateDatasetMetadata", request, ApiUpdateDatasetMetadataRequest, ApiUpdateDatasetMetadataResponse)

  def download_dataset(self, request: ApiDownloadDatasetRequest = None) -> HttpRedirect:
    r"""`request` defaults to an empty ApiDownloadDatasetRequest."""
    return self._invoke("ApiDownloadDataset", request, ApiDownloadDatasetRequest, HttpRedirect)

  def download_dataset_raw(self, request: ApiDownloadDatasetRawRequest = None) -> HttpRedirect:
    r"""
    Note: This API method has extremely low usage, and can be considered for
    deprecation. The existing DownloadDataset RPC, with `raw=true` set on the
    request, is equivalent.

    `request` defaults to an empty ApiDownloadDatasetRawRequest.
    """
    return self._invoke("ApiDownloadDatasetRaw", request, ApiDownloadDatasetRawRequest, HttpRedirect)

  def create_dataset_version(self, request: ApiCreateDatasetVersionRequest = None) -> ApiCreateDatasetResponse:
    r"""`request` defaults to an empty ApiCreateDatasetVersionRequest."""
    return self._invoke("ApiCreateDatasetVersion", request, ApiCreateDatasetVersionRequest, ApiCreateDatasetResponse)

  def create_dataset_version_by_id(self, request: ApiCreateDatasetVersionByIdRequest = None) -> ApiCreateDatasetResponse:
    r"""`request` defaults to an empty ApiCreateDatasetVersionByIdRequest."""
    return self._invoke("ApiCreateDatasetVersionById", request, ApiCreateDatasetVersionByIdRequest, ApiCreateDatasetResponse)

  def create_dataset(self, request: ApiCreateDatasetRequest = None) -> ApiCreateDatasetResponse:
    r"""`request` defaults to an empty ApiCreateDatasetRequest."""
    return self._invoke("ApiCreateDataset", request, ApiCreateDatasetRequest, ApiCreateDatasetResponse)

  def get_dataset_status(self, request: ApiGetDatasetStatusRequest = None) -> ApiGetDatasetStatusResponse:
    r"""`request` defaults to an empty ApiGetDatasetStatusRequest."""
    return self._invoke("ApiGetDatasetStatus", request, ApiGetDatasetStatusRequest, ApiGetDatasetStatusResponse)

  def upload_dataset_file(self, request: ApiUploadDatasetFileRequest = None) -> ApiUploadDatasetFileResponse:
    r"""
    Deprecated. Use the new unified BlobApiService#StartBlobUpload rpc.

    `request` defaults to an empty ApiUploadDatasetFileRequest.
    """
    return self._invoke("ApiUploadDatasetFile", request, ApiUploadDatasetFileRequest, ApiUploadDatasetFileResponse)

  def delete_dataset(self, request: ApiDeleteDatasetRequest = None) -> ApiDeleteDatasetResponse:
    r"""`request` defaults to an empty ApiDeleteDatasetRequest."""
    return self._invoke("ApiDeleteDataset", request, ApiDeleteDatasetRequest, ApiDeleteDatasetResponse)
171 |
--------------------------------------------------------------------------------
/src/kagglesdk/datasets/types/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/datasets/types/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/datasets/types/dataset_enums.py:
--------------------------------------------------------------------------------
1 | import enum
2 |
class DatabundleVersionStatus(enum.Enum):
  r"""
  States of a databundle version; member names suggest an ingestion pipeline
  (blobs received -> decompressed -> copied -> compressed -> READY) plus
  terminal/maintenance states.  Sibling enums in this module warn against
  renaming members, so values are presumably stable wire identifiers —
  avoid renumbering.
  """
  NOT_YET_PERSISTED = 0
  BLOBS_RECEIVED = 1
  BLOBS_DECOMPRESSED = 2
  BLOBS_COPIED_TO_SDS = 3
  INDIVIDUAL_BLOBS_COMPRESSED = 4
  READY = 5
  FAILED = 6
  DELETED = 7
  REPROCESSING = 8
13 |
class DatasetFileTypeGroup(enum.Enum):
  r"""
  File-type filter groups for dataset listing.

  This enum drives acceptable values from the python API, so avoid changing
  enum member names if possible
  """
  DATASET_FILE_TYPE_GROUP_ALL = 0
  DATASET_FILE_TYPE_GROUP_CSV = 1
  DATASET_FILE_TYPE_GROUP_SQLITE = 2
  DATASET_FILE_TYPE_GROUP_JSON = 3
  DATASET_FILE_TYPE_GROUP_BIG_QUERY = 4
  DATASET_FILE_TYPE_GROUP_PARQUET = 5
25 |
class DatasetLicenseGroup(enum.Enum):
  r"""
  License filter groups for dataset listing.

  This enum drives acceptable values from the python API, so avoid changing
  enum member names if possible
  """
  DATASET_LICENSE_GROUP_ALL = 0
  DATASET_LICENSE_GROUP_CC = 1
  DATASET_LICENSE_GROUP_GPL = 2
  DATASET_LICENSE_GROUP_ODB = 3
  DATASET_LICENSE_GROUP_OTHER = 4
36 |
class DatasetSelectionGroup(enum.Enum):
  r"""
  Dataset listing selection filters.  Values are intentionally non-sequential:
  members were appended over time, so declaration order groups related filters
  rather than following numeric order.  Do not renumber.
  """
  DATASET_SELECTION_GROUP_PUBLIC = 0
  DATASET_SELECTION_GROUP_MY = 1
  DATASET_SELECTION_GROUP_USER = 2
  DATASET_SELECTION_GROUP_USER_SHARED_WITH_ME = 3
  DATASET_SELECTION_GROUP_UPVOTED = 4
  DATASET_SELECTION_GROUP_MY_PRIVATE = 5
  DATASET_SELECTION_GROUP_MY_PUBLIC = 10
  DATASET_SELECTION_GROUP_ORGANIZATION = 6
  DATASET_SELECTION_GROUP_BOOKMARKED = 11
  DATASET_SELECTION_GROUP_COLLABORATION = 12
  DATASET_SELECTION_GROUP_SHARED_WITH_USER = 13
  DATASET_SELECTION_GROUP_FEATURED = 7
  # The bare string below is a generator-emitted annotation (no runtime
  # effect); it appears to label the preceding member as legacy — confirm
  # against the source proto.
  """Old"""
  DATASET_SELECTION_GROUP_ALL = 8
  DATASET_SELECTION_GROUP_UNFEATURED = 9
53 |
class DatasetSizeGroup(enum.Enum):
  r"""
  Size filter groups for dataset listing.

  This enum drives acceptable values from the python API, so avoid changing
  enum member names if possible
  """
  DATASET_SIZE_GROUP_ALL = 0
  DATASET_SIZE_GROUP_SMALL = 1
  DATASET_SIZE_GROUP_MEDIUM = 2
  DATASET_SIZE_GROUP_LARGE = 3
63 |
class DatasetSortBy(enum.Enum):
  r"""
  Sort orders for dataset listing.

  This enum drives acceptable values from the python API, so avoid changing
  enum member names if possible
  """
  DATASET_SORT_BY_HOTTEST = 0
  DATASET_SORT_BY_VOTES = 1
  DATASET_SORT_BY_UPDATED = 2
  DATASET_SORT_BY_ACTIVE = 3
  # Bare strings in this body are generator-emitted annotations (no runtime
  # effect); each appears to describe the member directly above it.
  """Deprecated"""
  DATASET_SORT_BY_PUBLISHED = 4
  DATASET_SORT_BY_RELEVANCE = 5
  """Old world"""
  DATASET_SORT_BY_LAST_VIEWED = 6
  DATASET_SORT_BY_USABILITY = 7
79 |
class DatasetViewedGroup(enum.Enum):
  r"""Filter on whether the requesting user has viewed the dataset."""
  DATASET_VIEWED_GROUP_UNSPECIFIED = 0
  DATASET_VIEWED_GROUP_VIEWED = 1
83 |
84 |
--------------------------------------------------------------------------------
/src/kagglesdk/education/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/education/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/education/services/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/education/services/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/education/services/education_api_service.py:
--------------------------------------------------------------------------------
1 | from kagglesdk.education.types.education_api_service import ApiTrackExerciseInteractionRequest, ApiTrackExerciseInteractionResponse
2 | from kagglesdk.kaggle_http_client import KaggleHttpClient
3 |
class EducationApiClient(object):
  """Thin RPC wrapper for the education.EducationApiService endpoint."""

  def __init__(self, client: KaggleHttpClient):
    # Shared transport used by every RPC helper.
    self._client = client

  def track_exercise_interaction(self, request: ApiTrackExerciseInteractionRequest = None) -> ApiTrackExerciseInteractionResponse:
    r"""
    Invoke the ApiTrackExerciseInteraction RPC.

    Args:
      request (ApiTrackExerciseInteractionRequest):
        Request payload; a default-constructed instance is used when None.
    """
    req = ApiTrackExerciseInteractionRequest() if request is None else request
    return self._client.call("education.EducationApiService", "ApiTrackExerciseInteraction", req, ApiTrackExerciseInteractionResponse)
20 |
--------------------------------------------------------------------------------
/src/kagglesdk/education/types/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/education/types/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/education/types/education_service.py:
--------------------------------------------------------------------------------
1 | import enum
2 | from kagglesdk.kaggle_object import *
3 | from typing import Optional
4 |
class LearnExerciseInteractionType(enum.Enum):
  r"""Kind of action a learner took on an exercise (check / hint / solution)."""
  LEARN_EXERCISE_INTERACTION_TYPE_UNSPECIFIED = 0
  CHECK = 1
  HINT = 2
  SOLUTION = 3
10 |
class LearnExerciseOutcomeType(enum.Enum):
  r"""Result of an exercise attempt."""
  LEARN_EXERCISE_OUTCOME_TYPE_UNSPECIFIED = 0
  PASS = 1
  FAIL = 2
  EXCEPTION = 3
  UNATTEMPTED = 4
17 |
class LearnExerciseQuestionType(enum.Enum):
  r"""Category of exercise question being interacted with."""
  LEARN_EXERCISE_QUESTION_TYPE_UNSPECIFIED = 0
  EQUALITY_CHECK_PROBLEM = 1
  CODING_PROBLEM = 2
  FUNCTION_PROBLEM = 3
  THOUGHT_EXPERIMENT = 4
24 |
class LearnNudgeType(enum.Enum):
  r"""What the next suggested item in a course is (see LearnNudge.next_item_type)."""
  COURSE_COMPLETE_NO_BONUS_LESSONS = 0
  COURSE_COMPLETE_WITH_BONUS_LESSONS = 1
  COURSE_INCOMPLETE = 2
  DO_EXERCISE = 3
  DO_TUTORIAL = 4
31 |
class LearnNudge(KaggleObject):
  r"""
  Record pointing a learner at the next item in a course.

  Attributes:
    course_index (int)
    course_name (str)
    course_slug (str)
    next_item_name (str)
    next_item_url (str)
    next_item_type (LearnNudgeType)
  """

  def __init__(self):
    # Proto-style defaults; wire mapping lives in LearnNudge._fields below.
    self._course_index = 0
    self._course_name = ""
    self._course_slug = ""
    self._next_item_name = ""
    self._next_item_url = ""
    self._next_item_type = LearnNudgeType.COURSE_COMPLETE_NO_BONUS_LESSONS
    # NOTE(review): _freeze() comes from KaggleObject — presumably locks the
    # attribute set after construction; confirm in kaggle_object.
    self._freeze()

  @property
  def course_index(self) -> int:
    return self._course_index

  @course_index.setter
  def course_index(self, course_index: int):
    # Assigning None clears the field via the property deleter (the deleter is
    # presumably supplied by KaggleObject and restores the default — confirm).
    # The same pattern applies to every setter below.
    if course_index is None:
      del self.course_index
      return
    if not isinstance(course_index, int):
      raise TypeError('course_index must be of type int')
    self._course_index = course_index

  @property
  def course_name(self) -> str:
    return self._course_name

  @course_name.setter
  def course_name(self, course_name: str):
    if course_name is None:
      del self.course_name
      return
    if not isinstance(course_name, str):
      raise TypeError('course_name must be of type str')
    self._course_name = course_name

  @property
  def course_slug(self) -> str:
    return self._course_slug

  @course_slug.setter
  def course_slug(self, course_slug: str):
    if course_slug is None:
      del self.course_slug
      return
    if not isinstance(course_slug, str):
      raise TypeError('course_slug must be of type str')
    self._course_slug = course_slug

  @property
  def next_item_name(self) -> str:
    return self._next_item_name

  @next_item_name.setter
  def next_item_name(self, next_item_name: str):
    if next_item_name is None:
      del self.next_item_name
      return
    if not isinstance(next_item_name, str):
      raise TypeError('next_item_name must be of type str')
    self._next_item_name = next_item_name

  @property
  def next_item_url(self) -> str:
    return self._next_item_url

  @next_item_url.setter
  def next_item_url(self, next_item_url: str):
    if next_item_url is None:
      del self.next_item_url
      return
    if not isinstance(next_item_url, str):
      raise TypeError('next_item_url must be of type str')
    self._next_item_url = next_item_url

  @property
  def next_item_type(self) -> 'LearnNudgeType':
    return self._next_item_type

  @next_item_type.setter
  def next_item_type(self, next_item_type: 'LearnNudgeType'):
    if next_item_type is None:
      del self.next_item_type
      return
    if not isinstance(next_item_type, LearnNudgeType):
      raise TypeError('next_item_type must be of type LearnNudgeType')
    self._next_item_type = next_item_type
129 |
130 |
# Serialization metadata consumed by the KaggleObject machinery.
# NOTE(review): argument meaning inferred from the call shape —
# (wire/JSON name, python name, attribute slot, type, default, serializer);
# confirm against kaggle_object.FieldMetadata's signature.
LearnNudge._fields = [
  FieldMetadata("courseIndex", "course_index", "_course_index", int, 0, PredefinedSerializer()),
  FieldMetadata("courseName", "course_name", "_course_name", str, "", PredefinedSerializer()),
  FieldMetadata("courseSlug", "course_slug", "_course_slug", str, "", PredefinedSerializer()),
  FieldMetadata("nextItemName", "next_item_name", "_next_item_name", str, "", PredefinedSerializer()),
  FieldMetadata("nextItemUrl", "next_item_url", "_next_item_url", str, "", PredefinedSerializer()),
  FieldMetadata("nextItemType", "next_item_type", "_next_item_type", LearnNudgeType, LearnNudgeType.COURSE_COMPLETE_NO_BONUS_LESSONS, EnumSerializer()),
]
139 |
140 |
--------------------------------------------------------------------------------
/src/kagglesdk/kaggle_client.py:
--------------------------------------------------------------------------------
1 | from kagglesdk.kernels.services.kernels_api_service import KernelsApiClient
2 | from kagglesdk.blobs.services.blob_api_service import BlobApiClient
3 | from kagglesdk.education.services.education_api_service import EducationApiClient
4 | from kagglesdk.models.services.model_api_service import ModelApiClient
5 | from kagglesdk.models.services.model_service import ModelClient
6 | from kagglesdk.competitions.services.competition_api_service import CompetitionApiClient
7 | from kagglesdk.datasets.services.dataset_api_service import DatasetApiClient
8 | from kagglesdk.admin.services.inbox_file_service import InboxFileClient
9 | from kagglesdk.security.services.oauth_service import OAuthClient
10 | from kagglesdk.users.services.account_service import AccountClient
11 | from kagglesdk.kaggle_env import KaggleEnv
12 | from kagglesdk.kaggle_http_client import KaggleHttpClient
13 |
14 |
class KaggleClient(object):
  """Aggregates the per-domain API clients behind one shared KaggleHttpClient.

  Usable as a context manager; entering/exiting delegates to the underlying
  http client.
  """

  class Kernels(object):
    def __init__(self, http_client: KaggleHttpClient):
      self.kernels_api_client = KernelsApiClient(http_client)

  class Blobs(object):
    def __init__(self, http_client: KaggleHttpClient):
      self.blob_api_client = BlobApiClient(http_client)

  class Education(object):
    def __init__(self, http_client: KaggleHttpClient):
      self.education_api_client = EducationApiClient(http_client)

  class Models(object):
    def __init__(self, http_client: KaggleHttpClient):
      self.model_api_client = ModelApiClient(http_client)
      self.model_client = ModelClient(http_client)

  class Competitions(object):
    def __init__(self, http_client: KaggleHttpClient):
      self.competition_api_client = CompetitionApiClient(http_client)

  class Datasets(object):
    def __init__(self, http_client: KaggleHttpClient):
      self.dataset_api_client = DatasetApiClient(http_client)

  class Admin(object):
    def __init__(self, http_client: KaggleHttpClient):
      self.inbox_file_client = InboxFileClient(http_client)

  class Security(object):
    def __init__(self, http_client: KaggleHttpClient):
      self.oauth_client = OAuthClient(http_client)

  class Users(object):
    def __init__(self, http_client: KaggleHttpClient):
      self.account_client = AccountClient(http_client)

  def __init__(self, env: KaggleEnv = None, verbose: bool = False, username: str = None, password: str = None):
    # One shared transport; _renew_iap_token is handed to the http client as a
    # refresh callback.
    self._http_client = http_client = KaggleHttpClient(env, verbose, self._renew_iap_token, username=username, password=password)
    self.kernels = KaggleClient.Kernels(http_client)
    self.blobs = KaggleClient.Blobs(http_client)
    self.education = KaggleClient.Education(http_client)
    self.models = KaggleClient.Models(http_client)
    self.competitions = KaggleClient.Competitions(http_client)
    self.datasets = KaggleClient.Datasets(http_client)
    self.admin = KaggleClient.Admin(http_client)
    self.security = KaggleClient.Security(http_client)
    self.users = KaggleClient.Users(http_client)
    self.username = username
    self.password = password

  def http_client(self):
    # Accessor for the shared transport.
    return self._http_client

  def _renew_iap_token(self):
    # NOTE(review): the Admin inner class above defines only
    # 'inbox_file_client', so 'self.admin.admin_client' would raise
    # AttributeError if this callback ever fires. Confirm whether an
    # AdminClient should be constructed in Admin.__init__ (cannot be fixed
    # here without its import).
    return self.admin.admin_client.renew_iap_token()

  def __enter__(self):
    # Delegate context management to the http client.
    self._http_client.__enter__()
    return self

  def __exit__(self, exc_type, exc_value, tb):
    self._http_client.__exit__(exc_type, exc_value, tb)
79 |
--------------------------------------------------------------------------------
/src/kagglesdk/kaggle_env.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import os
3 | from enum import Enum
4 |
KAGGLE_NOTEBOOK_ENV_VAR_NAME = "KAGGLE_KERNEL_RUN_TYPE"
KAGGLE_DATA_PROXY_URL_ENV_VAR_NAME = "KAGGLE_DATA_PROXY_URL"
KAGGLE_API_V1_TOKEN_PATH = "KAGGLE_API_V1_TOKEN"

logger = logging.getLogger(__name__)


class KaggleEnv(Enum):
  """Kaggle deployment targets addressable by the SDK."""
  LOCAL = 0  # localhost
  STAGING = 1  # staging.kaggle.com
  ADMIN = 2  # admin.kaggle.com
  QA = 3  # qa.kaggle.com
  # Direct prod access is not allowed to have IAP protection during testing,
  # but we support basic auth.
  PROD = 4  # www.kaggle.com


_env_to_endpoint = {
  KaggleEnv.LOCAL: 'http://localhost',
  KaggleEnv.STAGING: 'https://staging.kaggle.com',
  KaggleEnv.ADMIN: 'https://admin.kaggle.com',
  KaggleEnv.QA: 'https://qa.kaggle.com',
  # See the comment above in KaggleEnv enum.
  KaggleEnv.PROD: 'https://www.kaggle.com',
}


def get_endpoint(env: KaggleEnv):
  """Return the base URL for the given environment."""
  return _env_to_endpoint[env]


def get_env():
  """Resolve the target environment from KAGGLE_API_ENVIRONMENT.

  Unset or 'PROD' selects production; other recognized names map to their
  KaggleEnv member; anything else raises.
  """
  env = os.getenv('KAGGLE_API_ENVIRONMENT')
  if env is None or env == 'PROD':
    return KaggleEnv.PROD
  recognized = {
    'LOCALHOST': KaggleEnv.LOCAL,
    'ADMIN': KaggleEnv.ADMIN,
    'STAGING': KaggleEnv.STAGING,
    'QA': KaggleEnv.QA,
  }
  resolved = recognized.get(env)
  if resolved is None:
    raise Exception(f'Unrecognized value in KAGGLE_API_ENVIRONMENT: "{env}"')
  return resolved


def is_in_kaggle_notebook() -> bool:
  """True when running inside a Kaggle notebook with a usable data proxy.

  Requires both the notebook marker variable and the data-proxy URL to be
  set; warns (and returns False) when only the marker is present.
  """
  if os.getenv(KAGGLE_NOTEBOOK_ENV_VAR_NAME) is None:
    return False
  if os.getenv(KAGGLE_DATA_PROXY_URL_ENV_VAR_NAME) is None:
    # Missing endpoint for the Jwt client
    logger.warning(
      "Can't use the Kaggle Cache. "
      f"The '{KAGGLE_DATA_PROXY_URL_ENV_VAR_NAME}' environment variable is not set."
    )
    return False
  return True
60 |
--------------------------------------------------------------------------------
/src/kagglesdk/kernels/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/kernels/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/kernels/services/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/kernels/services/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/kernels/services/kernels_api_service.py:
--------------------------------------------------------------------------------
1 | from kagglesdk.common.types.file_download import FileDownload
2 | from kagglesdk.common.types.http_redirect import HttpRedirect
3 | from kagglesdk.kaggle_http_client import KaggleHttpClient
4 | from kagglesdk.kernels.types.kernels_api_service import ApiDeleteKernelRequest, ApiDeleteKernelResponse, ApiDownloadKernelOutputRequest, ApiDownloadKernelOutputZipRequest, ApiGetKernelRequest, ApiGetKernelResponse, ApiGetKernelSessionStatusRequest, ApiGetKernelSessionStatusResponse, ApiListKernelFilesRequest, ApiListKernelFilesResponse, ApiListKernelSessionOutputRequest, ApiListKernelSessionOutputResponse, ApiListKernelsRequest, ApiListKernelsResponse, ApiSaveKernelRequest, ApiSaveKernelResponse
5 |
class KernelsApiClient(object):
  """Thin RPC wrapper for the kernels.KernelsApiService endpoint."""

  def __init__(self, client: KaggleHttpClient):
    # Shared transport used by every RPC helper below.
    self._client = client

  def list_kernels(self, request: ApiListKernelsRequest = None) -> ApiListKernelsResponse:
    r"""
    Invoke the ApiListKernels RPC.

    Args:
      request (ApiListKernelsRequest):
        Request payload; a default-constructed instance is used when None.
    """
    req = ApiListKernelsRequest() if request is None else request
    return self._client.call("kernels.KernelsApiService", "ApiListKernels", req, ApiListKernelsResponse)

  def list_kernel_files(self, request: ApiListKernelFilesRequest = None) -> ApiListKernelFilesResponse:
    r"""
    Invoke the ApiListKernelFiles RPC.

    Args:
      request (ApiListKernelFilesRequest):
        Request payload; a default-constructed instance is used when None.
    """
    req = ApiListKernelFilesRequest() if request is None else request
    return self._client.call("kernels.KernelsApiService", "ApiListKernelFiles", req, ApiListKernelFilesResponse)

  def get_kernel(self, request: ApiGetKernelRequest = None) -> ApiGetKernelResponse:
    r"""
    Invoke the ApiGetKernel RPC.

    Args:
      request (ApiGetKernelRequest):
        Request payload; a default-constructed instance is used when None.
    """
    req = ApiGetKernelRequest() if request is None else request
    return self._client.call("kernels.KernelsApiService", "ApiGetKernel", req, ApiGetKernelResponse)

  def save_kernel(self, request: ApiSaveKernelRequest = None) -> ApiSaveKernelResponse:
    r"""
    Invoke the ApiSaveKernel RPC.

    Args:
      request (ApiSaveKernelRequest):
        Request payload; a default-constructed instance is used when None.
    """
    req = ApiSaveKernelRequest() if request is None else request
    return self._client.call("kernels.KernelsApiService", "ApiSaveKernel", req, ApiSaveKernelResponse)

  def list_kernel_session_output(self, request: ApiListKernelSessionOutputRequest = None) -> ApiListKernelSessionOutputResponse:
    r"""
    Invoke the ApiListKernelSessionOutput RPC.

    Args:
      request (ApiListKernelSessionOutputRequest):
        Request payload; a default-constructed instance is used when None.
    """
    req = ApiListKernelSessionOutputRequest() if request is None else request
    return self._client.call("kernels.KernelsApiService", "ApiListKernelSessionOutput", req, ApiListKernelSessionOutputResponse)

  def get_kernel_session_status(self, request: ApiGetKernelSessionStatusRequest = None) -> ApiGetKernelSessionStatusResponse:
    r"""
    Invoke the ApiGetKernelSessionStatus RPC.

    Args:
      request (ApiGetKernelSessionStatusRequest):
        Request payload; a default-constructed instance is used when None.
    """
    req = ApiGetKernelSessionStatusRequest() if request is None else request
    return self._client.call("kernels.KernelsApiService", "ApiGetKernelSessionStatus", req, ApiGetKernelSessionStatusResponse)

  def download_kernel_output(self, request: ApiDownloadKernelOutputRequest = None) -> HttpRedirect:
    r"""
    Invoke the ApiDownloadKernelOutput RPC.

    Meant for use by Kaggle Hub (http bindings and terminology align with
    that).

    Args:
      request (ApiDownloadKernelOutputRequest):
        Request payload; a default-constructed instance is used when None.
    """
    req = ApiDownloadKernelOutputRequest() if request is None else request
    return self._client.call("kernels.KernelsApiService", "ApiDownloadKernelOutput", req, HttpRedirect)

  def download_kernel_output_zip(self, request: ApiDownloadKernelOutputZipRequest = None) -> FileDownload:
    r"""
    Invoke the ApiDownloadKernelOutputZip RPC.

    Meant for use by Kaggle Hub (and DownloadKernelOutput above).

    Args:
      request (ApiDownloadKernelOutputZipRequest):
        Request payload; a default-constructed instance is used when None.
    """
    req = ApiDownloadKernelOutputZipRequest() if request is None else request
    return self._client.call("kernels.KernelsApiService", "ApiDownloadKernelOutputZip", req, FileDownload)

  def delete_kernel(self, request: ApiDeleteKernelRequest = None) -> ApiDeleteKernelResponse:
    r"""
    Invoke the ApiDeleteKernel RPC.

    Args:
      request (ApiDeleteKernelRequest):
        Request payload; a default-constructed instance is used when None.
    """
    req = ApiDeleteKernelRequest() if request is None else request
    return self._client.call("kernels.KernelsApiService", "ApiDeleteKernel", req, ApiDeleteKernelResponse)
122 |
--------------------------------------------------------------------------------
/src/kagglesdk/kernels/types/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/kernels/types/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/kernels/types/kernels_enums.py:
--------------------------------------------------------------------------------
1 | import enum
2 |
class KernelExecutionType(enum.Enum):
  r"""
  How a kernel run was launched.  NOTE(review): INTER_ACTIVE is presumably a
  code-generator rendering of "Interactive"; member names look like part of
  the public surface, so it is left as-is.
  """
  KERNEL_EXECUTION_TYPE_UNSPECIFIED = 0
  SAVE_AND_RUN_ALL = 1
  INTER_ACTIVE = 2
  QUICK_SAVE = 3
8 |
class KernelsListSortType(enum.Enum):
  r"""Sort orders for kernel listing."""
  HOTNESS = 0
  COMMENT_COUNT = 1
  DATE_CREATED = 2
  DATE_RUN = 3
  RELEVANCE = 4
  SCORE_ASCENDING = 5
  SCORE_DESCENDING = 6
  VIEW_COUNT = 7
  VOTE_COUNT = 8
19 |
class KernelsListViewType(enum.Enum):
  r"""Scope/filter applied when listing kernels."""
  KERNELS_LIST_VIEW_TYPE_UNSPECIFIED = 0
  PROFILE = 1
  UPVOTED = 2
  EVERYONE = 3
  COLLABORATION = 4
  FORK = 5
  BOOKMARKED = 6
  RECENTLY_VIEWED = 7
  PUBLIC_AND_USERS_PRIVATE = 8
30 |
class KernelWorkerStatus(enum.Enum):
  r"""Execution states of a kernel worker session."""
  QUEUED = 0
  RUNNING = 1
  COMPLETE = 2
  ERROR = 3
  CANCEL_REQUESTED = 4
  CANCEL_ACKNOWLEDGED = 5
  NEW_SCRIPT = 6
39 |
40 |
--------------------------------------------------------------------------------
/src/kagglesdk/models/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/models/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/models/services/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/models/services/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/models/services/model_service.py:
--------------------------------------------------------------------------------
1 | from kagglesdk.kaggle_http_client import KaggleHttpClient
2 | from kagglesdk.models.types.model_service import GetModelMetricsRequest, GetModelMetricsResponse
3 |
class ModelClient(object):
  """Thin RPC wrapper for the models.ModelService endpoint."""

  def __init__(self, client: KaggleHttpClient):
    # Shared transport used by the RPC helper.
    self._client = client

  def get_model_metrics(self, request: GetModelMetricsRequest = None) -> GetModelMetricsResponse:
    r"""
    Invoke the GetModelMetrics RPC.

    Args:
      request (GetModelMetricsRequest):
        Request payload; a default-constructed instance is used when None.
    """
    req = GetModelMetricsRequest() if request is None else request
    return self._client.call("models.ModelService", "GetModelMetrics", req, GetModelMetricsResponse)
20 |
--------------------------------------------------------------------------------
/src/kagglesdk/models/types/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/models/types/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/models/types/model_enums.py:
--------------------------------------------------------------------------------
1 | import enum
2 |
class GatingAgreementRequestsReviewStatus(enum.Enum):
  r"""Review outcome for a model gating-agreement request."""
  GATING_AGREEMENT_REQUESTS_REVIEW_STATUS_UNSPECIFIED = 0
  GATING_AGREEMENT_REQUESTS_REVIEW_STATUS_PENDING = 1
  GATING_AGREEMENT_REQUESTS_REVIEW_STATUS_ACCEPTED = 2
  GATING_AGREEMENT_REQUESTS_REVIEW_STATUS_REJECTED = 3
8 |
class ListModelsOrderBy(enum.Enum):
  r"""Sort orders for model listing."""
  LIST_MODELS_ORDER_BY_UNSPECIFIED = 0
  LIST_MODELS_ORDER_BY_HOTNESS = 1
  LIST_MODELS_ORDER_BY_DOWNLOAD_COUNT = 2
  LIST_MODELS_ORDER_BY_VOTE_COUNT = 3
  LIST_MODELS_ORDER_BY_NOTEBOOK_COUNT = 4
  LIST_MODELS_ORDER_BY_PUBLISH_TIME = 5
  LIST_MODELS_ORDER_BY_CREATE_TIME = 6
  LIST_MODELS_ORDER_BY_UPDATE_TIME = 7
  LIST_MODELS_ORDER_BY_VIEW_TIME_DESC = 8
19 |
class ModelFramework(enum.Enum):
  r"""
  ML framework/format of a model instance.  Values are non-sequential:
  members were appended over time, so declaration order groups related
  frameworks rather than following numeric order.  Do not renumber.
  """
  MODEL_FRAMEWORK_UNSPECIFIED = 0
  MODEL_FRAMEWORK_TENSOR_FLOW_1 = 1
  MODEL_FRAMEWORK_TENSOR_FLOW_2 = 2
  MODEL_FRAMEWORK_TF_LITE = 3
  MODEL_FRAMEWORK_TF_JS = 4
  MODEL_FRAMEWORK_PY_TORCH = 5
  MODEL_FRAMEWORK_JAX = 6
  MODEL_FRAMEWORK_FLAX = 14
  MODEL_FRAMEWORK_PAX = 15
  MODEL_FRAMEWORK_MAX_TEXT = 17
  MODEL_FRAMEWORK_GEMMA_CPP = 18
  MODEL_FRAMEWORK_GGML = 19
  MODEL_FRAMEWORK_GGUF = 21
  MODEL_FRAMEWORK_CORAL = 7
  MODEL_FRAMEWORK_SCIKIT_LEARN = 8
  MODEL_FRAMEWORK_MXNET = 9
  MODEL_FRAMEWORK_ONNX = 10
  MODEL_FRAMEWORK_KERAS = 11
  MODEL_FRAMEWORK_TRANSFORMERS = 16
  MODEL_FRAMEWORK_API = 12
  MODEL_FRAMEWORK_OTHER = 13
  MODEL_FRAMEWORK_TENSOR_RT_LLM = 20
  MODEL_FRAMEWORK_TRITON = 22
44 |
class ModelInstanceType(enum.Enum):
  r"""Relationship of a model instance to its base model."""
  MODEL_INSTANCE_TYPE_UNSPECIFIED = 0
  MODEL_INSTANCE_TYPE_BASE_MODEL = 1
  MODEL_INSTANCE_TYPE_KAGGLE_VARIANT = 2
  MODEL_INSTANCE_TYPE_EXTERNAL_VARIANT = 3
50 |
class ModelVersionLinkType(enum.Enum):
  r"""Kind of external (Vertex) link attached to a model version."""
  MODEL_VERSION_LINK_TYPE_UNSPECIFIED = 0
  MODEL_VERSION_LINK_TYPE_VERTEX_OPEN = 1
  MODEL_VERSION_LINK_TYPE_VERTEX_DEPLOY = 2
55 |
class GatingAgreementRequestsExpiryStatus(enum.Enum):
  r"""Whether a model gating-agreement request has expired."""
  GATING_AGREEMENT_REQUESTS_EXPIRY_STATUS_UNSPECIFIED = 0
  GATING_AGREEMENT_REQUESTS_EXPIRY_STATUS_NOT_EXPIRED = 1
  GATING_AGREEMENT_REQUESTS_EXPIRY_STATUS_IS_EXPIRED = 2
60 |
61 |
--------------------------------------------------------------------------------
/src/kagglesdk/security/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/security/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/security/services/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/security/services/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/security/services/oauth_service.py:
--------------------------------------------------------------------------------
1 | from kagglesdk.common.types.http_redirect import HttpRedirect
2 | from kagglesdk.kaggle_http_client import KaggleHttpClient
3 | from kagglesdk.security.types.oauth_service import ExchangeOAuthTokenRequest, ExchangeOAuthTokenResponse, IntrospectTokenRequest, IntrospectTokenResponse, StartOAuthFlowRequest
4 |
class OAuthClient(object):
  """Thin RPC wrapper for the security.OAuthService endpoint."""

  def __init__(self, client: KaggleHttpClient):
    # Shared transport used by every RPC helper below.
    self._client = client

  def start_oauth_flow(self, request: StartOAuthFlowRequest = None) -> HttpRedirect:
    r"""
    Invoke the StartOAuthFlow RPC.

    Args:
      request (StartOAuthFlowRequest):
        Request payload; a default-constructed instance is used when None.
    """
    req = StartOAuthFlowRequest() if request is None else request
    return self._client.call("security.OAuthService", "StartOAuthFlow", req, HttpRedirect)

  def exchange_oauth_token(self, request: ExchangeOAuthTokenRequest = None) -> ExchangeOAuthTokenResponse:
    r"""
    Invoke the ExchangeOAuthToken RPC.

    Args:
      request (ExchangeOAuthTokenRequest):
        Request payload; a default-constructed instance is used when None.
    """
    req = ExchangeOAuthTokenRequest() if request is None else request
    return self._client.call("security.OAuthService", "ExchangeOAuthToken", req, ExchangeOAuthTokenResponse)

  def introspect_token(self, request: IntrospectTokenRequest = None) -> IntrospectTokenResponse:
    r"""
    Invoke the IntrospectToken RPC.

    Args:
      request (IntrospectTokenRequest):
        Request payload; a default-constructed instance is used when None.
    """
    req = IntrospectTokenRequest() if request is None else request
    return self._client.call("security.OAuthService", "IntrospectToken", req, IntrospectTokenResponse)
45 |
--------------------------------------------------------------------------------
/src/kagglesdk/security/types/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/security/types/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/security/types/authentication.py:
--------------------------------------------------------------------------------
1 | from kagglesdk.kaggle_object import *
2 | from typing import Optional, List
3 |
class AuthorizationScope(KaggleObject):
  r"""
  One scope entry: a resource paired with either a permission or a role.
  The two are a one-of — assigning one clears the other (see the setters).

  Attributes:
    resource_id (int)
    permission (AuthorizationPermissionScope)
    role (AuthorizationRoleScope)
  """

  def __init__(self):
    self._resource_id = 0
    self._permission = None
    self._role = None
    self._freeze()  # presumably locks attribute creation (KaggleObject) — defined outside this file

  @property
  def resource_id(self) -> int:
    return self._resource_id

  @resource_id.setter
  def resource_id(self, resource_id: int):
    # Assigning None clears the field via the base-class deleter.
    if resource_id is None:
      del self.resource_id
      return
    if not isinstance(resource_id, int):
      raise TypeError('resource_id must be of type int')
    self._resource_id = resource_id

  @property
  def permission(self) -> Optional['AuthorizationPermissionScope']:
    return self._permission or None

  @permission.setter
  def permission(self, permission: Optional['AuthorizationPermissionScope']):
    if permission is None:
      del self.permission
      return
    if not isinstance(permission, AuthorizationPermissionScope):
      raise TypeError('permission must be of type AuthorizationPermissionScope')
    # One-of semantics: setting a permission clears any previously set role.
    del self.role
    self._permission = permission

  @property
  def role(self) -> Optional['AuthorizationRoleScope']:
    return self._role or None

  @role.setter
  def role(self, role: Optional['AuthorizationRoleScope']):
    if role is None:
      del self.role
      return
    if not isinstance(role, AuthorizationRoleScope):
      raise TypeError('role must be of type AuthorizationRoleScope')
    # One-of semantics: setting a role clears any previously set permission.
    del self.permission
    self._role = role
58 |
59 |
class AuthorizationPermissionScope(KaggleObject):
  r"""
  A named permission (with optional human-readable description) usable as
  one arm of an AuthorizationScope.

  Attributes:
    name (str)
    description (str)
  """

  def __init__(self):
    self._name = ""
    self._description = None
    self._freeze()  # presumably locks attribute creation (KaggleObject) — defined outside this file

  @property
  def name(self) -> str:
    return self._name

  @name.setter
  def name(self, name: str):
    # Assigning None clears the field via the base-class deleter.
    if name is None:
      del self.name
      return
    if not isinstance(name, str):
      raise TypeError('name must be of type str')
    self._name = name

  @property
  def description(self) -> str:
    # Optional field: reads as "" when unset.
    return self._description or ""

  @description.setter
  def description(self, description: Optional[str]):
    if description is None:
      del self.description
      return
    if not isinstance(description, str):
      raise TypeError('description must be of type str')
    self._description = description
97 |
98 |
class AuthorizationRoleScope(KaggleObject):
  r"""
  A named role: a bundle of AuthorizationPermissionScope entries, usable as
  one arm of an AuthorizationScope.

  Attributes:
    name (str)
    description (str)
    permissions (AuthorizationPermissionScope)
  """

  def __init__(self):
    self._name = ""
    self._description = None
    self._permissions = []
    self._freeze()  # presumably locks attribute creation (KaggleObject) — defined outside this file

  @property
  def name(self) -> str:
    return self._name

  @name.setter
  def name(self, name: str):
    # Assigning None clears the field via the base-class deleter.
    if name is None:
      del self.name
      return
    if not isinstance(name, str):
      raise TypeError('name must be of type str')
    self._name = name

  @property
  def description(self) -> str:
    # Optional field: reads as "" when unset.
    return self._description or ""

  @description.setter
  def description(self, description: Optional[str]):
    if description is None:
      del self.description
      return
    if not isinstance(description, str):
      raise TypeError('description must be of type str')
    self._description = description

  @property
  def permissions(self) -> Optional[List[Optional['AuthorizationPermissionScope']]]:
    return self._permissions

  @permissions.setter
  def permissions(self, permissions: Optional[List[Optional['AuthorizationPermissionScope']]]):
    if permissions is None:
      del self.permissions
      return
    # Validate the container first, then every element.
    if not isinstance(permissions, list):
      raise TypeError('permissions must be of type list')
    if not all([isinstance(t, AuthorizationPermissionScope) for t in permissions]):
      raise TypeError('permissions must contain only items of type AuthorizationPermissionScope')
    self._permissions = permissions
153 |
154 |
# Wire-format metadata binding each JSON field name to its property and
# backing attribute. FieldMetadata positional arguments appear to be
# (json_name, python_name, attribute_name, type, default, serializer) —
# defined in kagglesdk.kaggle_object; confirm there. Assigned after the
# class definitions so the cross-class references resolve.
AuthorizationScope._fields = [
  FieldMetadata("resourceId", "resource_id", "_resource_id", int, 0, PredefinedSerializer()),
  FieldMetadata("permission", "permission", "_permission", AuthorizationPermissionScope, None, KaggleObjectSerializer(), optional=True),
  FieldMetadata("role", "role", "_role", AuthorizationRoleScope, None, KaggleObjectSerializer(), optional=True),
]

AuthorizationPermissionScope._fields = [
  FieldMetadata("name", "name", "_name", str, "", PredefinedSerializer()),
  FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True),
]

AuthorizationRoleScope._fields = [
  FieldMetadata("name", "name", "_name", str, "", PredefinedSerializer()),
  FieldMetadata("description", "description", "_description", str, None, PredefinedSerializer(), optional=True),
  FieldMetadata("permissions", "permissions", "_permissions", AuthorizationPermissionScope, [], ListSerializer(KaggleObjectSerializer())),
]
171 |
172 |
--------------------------------------------------------------------------------
/src/kagglesdk/test/test_client.py:
--------------------------------------------------------------------------------
1 | from kagglesdk import kaggle_env
2 | from kagglesdk import KaggleClient, KaggleEnv
3 |
# Run with: python -m unittest tests.test_client
5 |
6 | import os
7 | import unittest
8 |
9 |
class TestClient(unittest.TestCase):
  """Smoke tests for KaggleClient construction and environment resolution."""

  def setUp(self):
    print('setup class:%s' % self)

  def tearDown(self):
    # NOTE(review): 'TestStuff' looks like a stale copy/paste in this log
    # message; harmless (log text only), but consider matching the class name.
    print('teardown class:TestStuff')

  # Environment

  def test_kaggle_environment(self):
    # get_env() should honour the KAGGLE_API_ENVIRONMENT variable and
    # map PROD to the public endpoint.
    os.environ['KAGGLE_API_ENVIRONMENT'] = 'PROD'

    env = kaggle_env.get_env()
    self.assertEqual(env, KaggleEnv.PROD)

    endpoint = kaggle_env.get_endpoint(env)
    self.assertEqual(endpoint, 'https://www.kaggle.com')

  # Client

  def test_kaggle_client(self):
    client = KaggleClient(
        env=KaggleEnv.PROD, verbose=False, username='dinosaur', password='xxxxxxxxxxxx'
    )

    # Credentials and derived HTTP-client settings round-trip unchanged.
    self.assertEqual(client.username, 'dinosaur')
    self.assertEqual(client.password, 'xxxxxxxxxxxx')
    self.assertEqual(client.http_client()._endpoint, 'https://www.kaggle.com')
    self.assertEqual(client.http_client()._verbose, False)
40 |
41 |
if __name__ == '__main__':
  # Allow running this file directly: python test_client.py
  unittest.main()
44 |
--------------------------------------------------------------------------------
/src/kagglesdk/users/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/users/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/users/services/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/users/services/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/users/services/account_service.py:
--------------------------------------------------------------------------------
1 | from kagglesdk.kaggle_http_client import KaggleHttpClient
2 | from kagglesdk.users.types.account_service import GenerateAccessTokenRequest, GenerateAccessTokenResponse
3 |
class AccountClient(object):
  """Generated stub for the `users.AccountService` RPC service."""

  #: Fully-qualified service name routed by every call on this stub.
  _SERVICE_NAME = "users.AccountService"

  def __init__(self, client: KaggleHttpClient):
    # Transport shared by all generated service stubs.
    self._client = client

  def generate_access_token(self, request: GenerateAccessTokenRequest = None) -> GenerateAccessTokenResponse:
    r"""
    Args:
      request (GenerateAccessTokenRequest):
        The request object; initialized to empty instance if not specified.
    """
    req = GenerateAccessTokenRequest() if request is None else request
    return self._client.call(self._SERVICE_NAME, "GenerateAccessToken", req, GenerateAccessTokenResponse)
20 |
--------------------------------------------------------------------------------
/src/kagglesdk/users/types/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Kaggle/kaggle-api/23981a5f0f61818a29bde74d0bcd8b86a8b610e8/src/kagglesdk/users/types/__init__.py
--------------------------------------------------------------------------------
/src/kagglesdk/users/types/account_service.py:
--------------------------------------------------------------------------------
1 | from datetime import timedelta
2 | import enum
3 | from kagglesdk.kaggle_object import *
4 | from kagglesdk.security.types.authentication import AuthorizationScope
5 | from typing import Optional, List
6 |
class ApiVersion(enum.Enum):
  """Kaggle API surface for which a generated access token will be used."""
  API_VERSION_UNSPECIFIED = 0
  API_VERSION_V1 = 1
  """Publicly available ('/api/v1' endpoints)."""
  API_VERSION_V2 = 2
  """Experimental, admin-only, internal ('/api/i' endpoints)."""
13 |
class GenerateAccessTokenRequest(KaggleObject):
  r"""
  Request payload for 'POST /api/v1/access-tokens/generate' (see endpoint()
  and method() below).

  Attributes:
    refresh_token (str)
      Refresh token used to generate a short-lived restricted access token.
      If not specified current user credentials will be used to generate an
      unrestricted access token.
    api_version (ApiVersion)
      Version of the Kaggle API for which this token will be used.
    expiration_duration (timedelta)
      Token expiration.
    authorization_context (AuthorizationContext)
      On which context (such as a Kaggle notebook) this token can be used.
    authorization_scopes (AuthorizationScope)
      Set of scopes to further restrict the token. If 'refresh_token' is
      specified, these should be a subset of the scopes allowed by the
      'refresh_token'.
  """

  def __init__(self):
    self._refresh_token = None
    self._api_version = ApiVersion.API_VERSION_UNSPECIFIED
    self._expiration_duration = None
    self._authorization_context = None
    self._authorization_scopes = []
    self._freeze()  # presumably locks attribute creation (KaggleObject) — defined outside this file

  @property
  def refresh_token(self) -> str:
    r"""
    Refresh token used to generate a short-lived restricted access token.
    If not specified current user credentials will be used to generate an
    unrestricted access token.
    """
    # Optional field: reads as "" when unset.
    return self._refresh_token or ""

  @refresh_token.setter
  def refresh_token(self, refresh_token: Optional[str]):
    # Assigning None clears the field via the base-class deleter.
    if refresh_token is None:
      del self.refresh_token
      return
    if not isinstance(refresh_token, str):
      raise TypeError('refresh_token must be of type str')
    self._refresh_token = refresh_token

  @property
  def api_version(self) -> 'ApiVersion':
    """Version of the Kaggle API for which this token will be used."""
    return self._api_version

  @api_version.setter
  def api_version(self, api_version: 'ApiVersion'):
    if api_version is None:
      del self.api_version
      return
    if not isinstance(api_version, ApiVersion):
      raise TypeError('api_version must be of type ApiVersion')
    self._api_version = api_version

  @property
  def expiration_duration(self) -> timedelta:
    """Token expiration."""
    return self._expiration_duration

  @expiration_duration.setter
  def expiration_duration(self, expiration_duration: timedelta):
    if expiration_duration is None:
      del self.expiration_duration
      return
    if not isinstance(expiration_duration, timedelta):
      raise TypeError('expiration_duration must be of type timedelta')
    self._expiration_duration = expiration_duration

  @property
  def authorization_context(self) -> Optional['AuthorizationContext']:
    """On which context (such as a Kaggle notebook) this token can be used."""
    return self._authorization_context

  @authorization_context.setter
  def authorization_context(self, authorization_context: Optional['AuthorizationContext']):
    if authorization_context is None:
      del self.authorization_context
      return
    if not isinstance(authorization_context, AuthorizationContext):
      raise TypeError('authorization_context must be of type AuthorizationContext')
    self._authorization_context = authorization_context

  @property
  def authorization_scopes(self) -> Optional[List[Optional['AuthorizationScope']]]:
    r"""
    Set of scopes to further restrict the token. If 'refresh_token' is
    specified, these should be a subset of the scopes allowed by the
    'refresh_token'.
    """
    return self._authorization_scopes

  @authorization_scopes.setter
  def authorization_scopes(self, authorization_scopes: Optional[List[Optional['AuthorizationScope']]]):
    if authorization_scopes is None:
      del self.authorization_scopes
      return
    # Validate the container first, then every element.
    if not isinstance(authorization_scopes, list):
      raise TypeError('authorization_scopes must be of type list')
    if not all([isinstance(t, AuthorizationScope) for t in authorization_scopes]):
      raise TypeError('authorization_scopes must contain only items of type AuthorizationScope')
    self._authorization_scopes = authorization_scopes

  def endpoint(self):
    path = '/api/v1/access-tokens/generate'
    # NOTE(review): the path has no placeholders, so format_map is a no-op
    # here; to_field_map is also handed `self` explicitly — presumably a
    # static-style helper on KaggleObject. Confirm against the base class.
    return path.format_map(self.to_field_map(self))


  @staticmethod
  def method():
    # HTTP verb used when calling endpoint().
    return 'POST'

  @staticmethod
  def body_fields():
    # '*' appears to mean "serialize every field into the request body".
    return '*'
133 |
134 |
class GenerateAccessTokenResponse(KaggleObject):
  r"""
  Response for GenerateAccessTokenRequest: the newly generated access token.

  Attributes:
    token (str)
  """

  def __init__(self):
    self._token = ""
    self._freeze()  # presumably locks attribute creation (KaggleObject) — defined outside this file

  @property
  def token(self) -> str:
    return self._token

  @token.setter
  def token(self, token: str):
    # Assigning None clears the field via the base-class deleter.
    if token is None:
      del self.token
      return
    if not isinstance(token, str):
      raise TypeError('token must be of type str')
    self._token = token
157 |
158 |
class AuthorizationContext(KaggleObject):
  r"""
  Execution context to which an access token may be restricted.

  Attributes:
    kernel_session_id (int)
      If set, access token is restricted to be used only from the specified
      notebook session.
  """

  def __init__(self):
    self._kernel_session_id = None
    self._freeze()  # presumably locks attribute creation (KaggleObject) — defined outside this file

  @property
  def kernel_session_id(self) -> int:
    r"""
    If set, access token is restricted to be used only from the specified
    notebook session.
    """
    # Optional field: reads as 0 when unset.
    return self._kernel_session_id or 0

  @kernel_session_id.setter
  def kernel_session_id(self, kernel_session_id: Optional[int]):
    # Assigning None clears the field via the base-class deleter.
    if kernel_session_id is None:
      del self.kernel_session_id
      return
    if not isinstance(kernel_session_id, int):
      raise TypeError('kernel_session_id must be of type int')
    self._kernel_session_id = kernel_session_id
187 |
188 |
# Wire-format metadata binding each JSON field name to its property and
# backing attribute. FieldMetadata positional arguments appear to be
# (json_name, python_name, attribute_name, type, default, serializer) —
# defined in kagglesdk.kaggle_object; confirm there. Assigned after the
# class definitions so the forward reference to AuthorizationContext resolves.
GenerateAccessTokenRequest._fields = [
  FieldMetadata("refreshToken", "refresh_token", "_refresh_token", str, None, PredefinedSerializer(), optional=True),
  FieldMetadata("apiVersion", "api_version", "_api_version", ApiVersion, ApiVersion.API_VERSION_UNSPECIFIED, EnumSerializer()),
  FieldMetadata("expirationDuration", "expiration_duration", "_expiration_duration", timedelta, None, TimeDeltaSerializer()),
  FieldMetadata("authorizationContext", "authorization_context", "_authorization_context", AuthorizationContext, None, KaggleObjectSerializer()),
  FieldMetadata("authorizationScopes", "authorization_scopes", "_authorization_scopes", AuthorizationScope, [], ListSerializer(KaggleObjectSerializer())),
]

GenerateAccessTokenResponse._fields = [
  FieldMetadata("token", "token", "_token", str, "", PredefinedSerializer()),
]

AuthorizationContext._fields = [
  FieldMetadata("kernelSessionId", "kernel_session_id", "_kernel_session_id", int, None, PredefinedSerializer(), optional=True),
]
204 |
205 |
--------------------------------------------------------------------------------
/src/kagglesdk/users/types/users_enums.py:
--------------------------------------------------------------------------------
1 | import enum
2 |
class UserAchievementTier(enum.Enum):
  """User progression tier; values 5/11/21 are special markers, not ranks
  earned by regular users (see the per-member notes below)."""
  NOVICE = 0
  CONTRIBUTOR = 1
  EXPERT = 2
  MASTER = 3
  GRANDMASTER = 4
  STAFF = 5
  """Kaggle admins"""
  ORGANIZATION = 11
  """Organizations"""
  RECALC = 21
  """Flag user for tier recalculation"""
15 |
class CollaboratorType(enum.Enum):
  """Access level held by a collaborator, in increasing order of privilege."""
  COLLABORATOR_TYPE_UNSPECIFIED = 0
  READER = 1
  WRITER = 2
  OWNER = 3
  ADMIN = 4
22 |
23 |
--------------------------------------------------------------------------------
/tests/dataset/data.csv:
--------------------------------------------------------------------------------
1 | id, fruit
2 | 1, apple
3 | 2, banana
4 | 3, citrus
5 | 4, apple
6 | 5, durian
--------------------------------------------------------------------------------
/tests/kernel/testing-x.ipynb:
--------------------------------------------------------------------------------
1 | {"metadata":{"kernelspec":{"language":"python","display_name":"Python 3","name":"python3"},"language_info":{"name":"python","version":"3.10.13","mimetype":"text/x-python","codemirror_mode":{"name":"ipython","version":3},"pygments_lexer":"ipython3","nbconvert_exporter":"python","file_extension":".py"},"kaggle":{"accelerator":"none","dataSources":[],"dockerImageVersionId":30646,"isInternetEnabled":true,"language":"python","sourceType":"notebook","isGpuEnabled":false}},"nbformat_minor":4,"nbformat":4,"cells":[{"cell_type":"code","source":"# This Python 3 environment comes with many helpful analytics libraries installed\n# It is defined by the kaggle/python Docker image: https://github.com/kaggle/docker-python\n# For example, here's several helpful packages to load\n\nimport numpy as np # linear algebra\nimport pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)\n\n# Input data files are available in the read-only \"../input/\" directory\n# For example, running this (by clicking run or pressing Shift+Enter) will list all files under the input directory\n\nimport os\nfor dirname, _, filenames in os.walk('/kaggle/input'):\n for filename in filenames:\n print(os.path.join(dirname, filename))\n\n# You can write up to 20GB to the current directory (/kaggle/working/) that gets preserved as output when you create a version using \"Save & Run All\" \n# You can also write temporary files to /kaggle/temp/, but they won't be saved outside of the current session","metadata":{"_uuid":"8f2839f25d086af736a60e9eeb907d3b93b6e0e5","_cell_guid":"b1076dfc-b9ad-4769-8c92-a6c4dae69d19","execution":{"iopub.status.busy":"2024-06-11T17:18:41.771461Z","iopub.execute_input":"2024-06-11T17:18:41.771892Z","iopub.status.idle":"2024-06-11T17:18:43.04124Z","shell.execute_reply.started":"2024-06-11T17:18:41.771858Z","shell.execute_reply":"2024-06-11T17:18:43.03999Z"},"trusted":true},"execution_count":null,"outputs":[]}]}
--------------------------------------------------------------------------------
/tests/model/instance/data.csv:
--------------------------------------------------------------------------------
1 | id, fruit
2 | 1, apple
3 | 2, banana
4 | 3, citrus
5 | 4, apple
6 | 5, durian
--------------------------------------------------------------------------------
/tests/model/instance/version/metadata.json:
--------------------------------------------------------------------------------
1 | {}
--------------------------------------------------------------------------------
/tests/test_commands.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Verify all options are plumbed through to the MT.
4 | # Set envar KAGGLE_DEVELOPER to the Kaggle user name (probably already done).
5 | # When prompted to delete something, respond with "no".
6 |
7 | # Still need to adjust for assumptions about existing artifacts, like
8 | # the notebook "exercise-as-with"
9 |
10 | kaggle --version
11 |
12 | echo "kaggle competitions files"
13 | kaggle competitions files titanic --page-size=3 --page-token=abcd -v -q
14 | echo "kaggle competitions list"
15 | kaggle competitions list --group general --category featured --sort-by prize
16 | echo "kaggle competitions download"
17 | kaggle c download titanic -w -o -q
18 | kaggle c download titanic -f test.csv -p tost
19 | echo "kaggle competitions submit"
20 | kaggle c download house-prices-advanced-regression-techniques -f sample_submission.csv
21 | kaggle c submit house-prices-advanced-regression-techniques -f sample_submission.csv -m "Test message"
22 | echo "kaggle competitions submissions"
23 | kaggle c submissions house-prices-advanced-regression-techniques -v -q
24 | echo "kaggle competitions leaderboard"
25 | kaggle c leaderboard titanic -v -q -d -p leaders
26 | kaggle c leaderboard titanic -s > leaderboard.txt
27 | rm -r titanic.zip tost sample_submission.csv leaders leaderboard.txt
28 |
29 | echo "kaggle kernels list"
30 | kaggle k list -m -s Exercise --page-size 5 -p 2 -v --sort-by dateRun
31 | kaggle k list --parent $KAGGLE_DEVELOPER/exercise-lists
32 | kaggle k list --competition house-prices-advanced-regression-techniques --page-size 5
33 | kaggle k list --dataset dansbecker/home-data-for-ml-course --page-size 5
34 | kaggle k list --user $KAGGLE_DEVELOPER --language python --kernel-type notebook --output-type data
35 | echo "kaggle kernels files"
36 | kaggle kernels files kerneler/sqlite-global-default -v --page-size=1
37 | echo "kaggle kernels init"
38 | kaggle k init -p tests/kernel
39 | echo "kaggle kernels get"
40 | kaggle k get -p tests/kernel $KAGGLE_DEVELOPER/exercise-as-with -m
41 | kaggle k get --wp $KAGGLE_DEVELOPER/exercise-as-with
42 | sed -i s/exercise-as-with/exercise-delete/ tests/kernel/exercise-as-with.ipynb
43 | echo "kaggle kernels update"
44 | kaggle kernels update -p tests/kernel
45 | rm -f tests/kernel/exercise-as-with.ipynb tests/kernel/kernel-metadata.json exercise-as-with.ipynb
46 | echo "kaggle kernels status"
47 | kaggle k status kerneler/sqlite-global-default
48 | echo "kaggle kernels output"
49 | kaggle k output kerneler/sqlite-global-default -o
50 | echo "kaggle kernels delete"
51 | kaggle k delete $KAGGLE_DEVELOPER/exercise-delete
52 | kaggle k delete $KAGGLE_DEVELOPER/exercise-delete --yes
53 |
54 | echo "kaggle datasets list"
55 | kaggle d list --size 10
56 | kaggle d list -m
57 | kaggle d list --user oktayrdeki --csv
58 | kaggle d list --file-type csv --page 2 --sort-by updated -s student --min-size 13000 --max-size 15000
59 | kaggle d list --license odb --tags internet --search telco
60 | echo "kaggle datasets files"
61 | kaggle datasets files kerneler/brazilian-bird-observation-metadata-from-wikiaves --page-size=7 --page-token=abcd
62 | echo "kaggle datasets init"
63 | kaggle d init -p tests/dataset
64 | echo "kaggle datasets create"
65 | export SLUG=testing
66 | sed -i s/INSERT_TITLE_HERE/TitleHere/ tests/dataset/dataset-metadata.json
67 | sed -i s/INSERT_SLUG_HERE/$SLUG/ tests/dataset/dataset-metadata.json
68 | kaggle d create -p tests/dataset --public -q -t -r skip
69 | echo "kaggle datasets download"
70 | kaggle datasets download -d willianoliveiragibin/pixar-films
71 | kaggle d download goefft/public-datasets-with-file-types-and-columns -p tmp --unzip -o -q
72 | kaggle d download goefft/public-datasets-with-file-types-and-columns -f dataset_results.csv -w -q -o
73 | echo "kaggle datasets version"
74 | kaggle d version -m VersionNotesGoHere -p tests/dataset -q -t -r skip -d
75 | echo "kaggle datasets metadata"
76 | kaggle datasets metadata goefft/public-datasets-with-file-types-and-columns -p tests/dataset
77 | echo "kaggle datasets status"
78 | kaggle d status goefft/public-datasets-with-file-types-and-columns
79 | echo "kaggle datasets delete"
80 | kaggle d delete $KAGGLE_DEVELOPER/$SLUG
81 | kaggle d delete $KAGGLE_DEVELOPER/$SLUG --yes
82 | rm -rf tmp tests/dataset/dataset-metadata.json dataset_results.csv.zip dataset_results.csv pixar-films.zip
83 |
84 | echo "kaggle models init"
85 | mkdir tmp
86 | kaggle m init -p tmp
87 | echo "kaggle models list"
88 | kaggle m list --owner $KAGGLE_DEVELOPER --sort-by createTime -v
89 | kaggle m list -s gemini --page-size 5
90 | echo "kaggle models create"
91 | sed -i s/INSERT_OWNER_SLUG_HERE/$KAGGLE_DEVELOPER/ tmp/model-metadata.json
92 | sed -i s/INSERT_TITLE_HERE/ModelTitle/ tmp/model-metadata.json
93 | sed -i s/INSERT_SLUG_HERE/test-model/ tmp/model-metadata.json
94 | kaggle m create -p tmp
95 | echo "kaggle models update"
96 | kaggle m update -p tmp
97 | sleep 10
98 | echo "kaggle models get"
99 | kaggle m get -p tmp $KAGGLE_DEVELOPER/test-model
100 |
101 | echo "kaggle models instances init"
102 | kaggle m instances init -p tmp
103 | echo "kaggle models instances create"
104 | sed -i s/INSERT_OWNER_SLUG_HERE/$KAGGLE_DEVELOPER/ tmp/model-instance-metadata.json
105 | sed -i s/INSERT_EXISTING_MODEL_SLUG_HERE/test-model/ tmp/model-instance-metadata.json
106 | sed -i s/INSERT_INSTANCE_SLUG_HERE/main/ tmp/model-instance-metadata.json
107 | sed -i s/INSERT_FRAMEWORK_HERE/jax/ tmp/model-instance-metadata.json
108 | echo "a,b,c,d" > tmp/data.csv
109 | kaggle models instances create -p tmp -q -r skip
110 | sleep 10
111 | echo "kaggle models instances update"
112 | kaggle models instances update -p tmp
113 | sleep 10
114 | echo "kaggle models instances get"
115 | kaggle models instances get $KAGGLE_DEVELOPER/test-model/jax/main -p tmp
116 | echo "kaggle models instances files"
117 | kaggle models instances files $KAGGLE_DEVELOPER/test-model/jax/main -v --page-size 5
118 |
119 | echo "kaggle models instances versions files"
120 | kaggle models instances versions files google/gemma/pytorch/7b/2 -v --page-size=3 --page-token=abcd
121 | echo "kaggle models instances versions create"
122 | kaggle models instances versions create -p tmp -q -r skip -n VersionNotes $KAGGLE_DEVELOPER/test-model/jax/main
123 | echo "kaggle models instances versions download"
124 | kaggle models instances versions download -p tmp -q -f --untar $KAGGLE_DEVELOPER/test-model/jax/main/1
125 |
126 | rm -rf tmp
127 | rm -f results.csv
128 |
129 | echo "kaggle models instances versions delete"
130 | kaggle m instances versions delete $KAGGLE_DEVELOPER/test-model/jax/main/1
131 | kaggle m instances versions delete $KAGGLE_DEVELOPER/test-model/jax/main/1 -y
132 | echo "kaggle models instances delete"
133 | kaggle m instances delete $KAGGLE_DEVELOPER/test-model/jax/main
134 | kaggle m instances delete $KAGGLE_DEVELOPER/test-model/jax/main -y
135 | echo "kaggle models delete"
136 | kaggle m delete $KAGGLE_DEVELOPER/test-model
137 | kaggle m delete $KAGGLE_DEVELOPER/test-model -y
138 |
--------------------------------------------------------------------------------
/tools/GeneratePythonLibrary.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -e
4 |
5 | readonly LOCAL_ENV="local"
6 | readonly PROD_ENV="prod"
7 |
# Print CLI help; $1 is the script name shown in the synopsis.
# The heredoc is unquoted so $1/$LOCAL_ENV/$PROD_ENV expand, and it ends
# with a blank line (matching the original trailing empty echo).
function usage {
  cat <<EOF
Usage : $1 [--install|--editable] [--test local|prod]

  --install (-i): Install the package locally.
  --editable (-e): Make the installed package always reference your latest
    source code. Implies "-i|--install". Be aware that changes to the "src"
    directory won't be reflected. See the README for details.
  --test (-t) [$LOCAL_ENV|$PROD_ENV]: Run tests (unit_tests.py) against http://localhost
    or https://www.kaggle.com.
  --watch (-w): Run the script in watch mode. It will watch the files under the "template"
    directory and KaggleSwagger* files, and regenerate the package when there is a change.

EOF
}
21 |
# Defaults; overridden by the flags parsed below (see usage()).
INSTALL="no"
INSTALL_EDITABLE="no"
TEST=""
WATCH="no"

while [[ $# -gt 0 ]]; do
  arg="$1"
  case $arg in
    --install|-i)
      INSTALL="yes"
      ;;
    --editable|-e|--editable-install|--install-editable)
      INSTALL_EDITABLE="yes"
      ;;
    --test|-t)
      # --test consumes a value argument; validate it immediately.
      TEST=$2
      if [[ "$TEST" != "$LOCAL_ENV" ]] && [[ "$TEST" != "$PROD_ENV" ]]; then
        echo -e "Invalid value for arg \"$1\": \"$TEST\". Must be \"$LOCAL_ENV\" or \"$PROD_ENV\".\n"
        usage $0
        # Exit non-zero: an invalid value is an error, consistent with the
        # unknown-argument branch below (this previously exited 0).
        exit 1
      fi
      shift
      ;;
    --watch|-w)
      WATCH="yes"
      INSTALL_EDITABLE="yes"  # watch mode implies an editable install
      ;;
    --help|-h)
      usage $0
      exit 0
      ;;
    *)
      echo -e "Invalid argument: \"$1\".\n"
      usage $0
      exit 1
      ;;
  esac
  shift # Proceed with the next argument.
done
61 |
62 | SELF_DIR=$(dirname $(realpath $0))
63 | SELF_DIR=${SELF_DIR%/*} # remove the last directory (tools) from the path
64 | cd $SELF_DIR
65 |
66 | KAGGLE_XDG_CONFIG_DIR="${XDG_CONFIG_HOME:-$HOME/.config}/kaggle"
67 | mkdir -p "$KAGGLE_XDG_CONFIG_DIR"
68 | KAGGLE_DEV_CONFIG_DIR=$(realpath "$KAGGLE_XDG_CONFIG_DIR/dev")
69 |
70 | trap cleanup EXIT
71 |
# One-time preparation: private config dir, stale symlink removal, dev creds.
function init {
  cd $SELF_DIR

  # Config dir must be private: it will hold kaggle.json credentials.
  mkdir -p "$KAGGLE_XDG_CONFIG_DIR" && chmod 700 "$KAGGLE_XDG_CONFIG_DIR"

  # Drop symlinks from a previous run; copy-src recreates them.
  echo "rm -f kaggle kagglesdk"
  rm -f kaggle kagglesdk

  create-local-creds
}
82 |
# Format the tree with black when it is installed; otherwise just report.
function reset {
  cd $SELF_DIR

  echo "run formatter"
  if [ -x "$(command -v black)" ]; then
    black .
  else
    echo "black is not installed on your system"
  fi
}
93 |
function create-local-creds {
  # Generate a separate dev credentials file (kaggle.json) to use when running against
  # http://localhost. This token only works when the webtier is running locally in debug
  # mode. When running against localhost, we set KAGGLE_CONFIG_DIR env var to
  # "~/.config/kaggle/dev/" so that the Python client searches for kaggle.json under this folder
  # and uses dummy dev creds
  local kaggle_config_file="$KAGGLE_DEV_CONFIG_DIR/kaggle.json"
  # Quote all path expansions: the directory derives from $HOME (or
  # $XDG_CONFIG_HOME) and may contain spaces.
  mkdir -p "$KAGGLE_DEV_CONFIG_DIR"
  local username=${KAGGLE_DEVELOPER:-$USER}
  echo "{\"username\":\"$username\",\"key\":\"local_api_token\"}" > "$kaggle_config_file"
  chmod 600 "$kaggle_config_file"
}
106 |
# Symlink the source packages into the repo root (old links removed by init).
function copy-src {
  ln -s ./src/kaggle .
  ln -s ./src/kagglesdk .
}
111 |
# Run unit_tests.py against the environment selected by --test; no-op when
# --test was not given or the kaggle CLI is missing from PATH.
function run-tests {
  if ! which kaggle > /dev/null 2> /dev/null; then
    echo "Warning: \"kaggle\" is not in PATH. Please add \"~/.local/bin\" to PATH in ~/.bashrc."
    return 0
  fi

  # TEST was validated during argument parsing.
  if [[ "$TEST" == "$LOCAL_ENV" ]]; then
    source tools/use-localhost.sh
  elif [[ "$TEST" == "$PROD_ENV" ]]; then
    source tools/use-prod.sh
  else
    return 0 # Nothing to do
  fi

  # Symlink the freshly generated packages into tests/ so unit_tests.py
  # imports them rather than any installed copy.
  cd tests
  rm -f kaggle kagglesdk
  ln -s ../kagglesdk .
  ln -s ../kaggle .
  python3 unit_tests.py --failfast
  rm kaggle kagglesdk
  cd ..
}
134 |
# Install pinned requirements, then the package itself — editable when
# requested (editable implies install; see argument parsing).
function install-package {
  pip3 install --break-system-packages --require-hashes -r requirements.txt
  if [[ "$INSTALL_EDITABLE" == "yes" ]]; then
    pip3 install --break-system-packages --upgrade --editable .
  elif [[ "$INSTALL" == "yes" ]]; then
    pip3 install --break-system-packages --upgrade .
  fi
}
143 |
# EXIT-trap handler: remove generator/test by-products from the repo root.
function cleanup {
  cd $SELF_DIR
  rm -rf tox.ini \
         test-requirements.txt \
         test \
         .travis.yml \
         git_push.sh \
         sample_submission.csv \
         ds_salaries.csv \
         test.csv \
         house-prices-advanced-regression-techniques.zip \
         data-science-salaries-2023.zip \
         kaggle/*.py-e \
         kaggle/api/*.py-e \
         kaggle/*.py.bak
}
160 |
# One full generation pass: format, link sources, install, test.
function run {
  reset

  copy-src
  install-package
  run-tests

  echo -e "\nGenerated the \"kaggle\" package successfully!"
}
170 |
171 | WATCHED_EVENTS="-e create -e modify -e delete"
172 |
function watch-src {
  # Block on inotifywait and re-link the sources every time anything under
  # "src" changes. Loops until inotifywait fails (e.g. watched path removed).
  local watched_paths="$SELF_DIR/src"

  echo "Watching for changes under \"src\"..."
  # $WATCHED_EVENTS is intentionally left unquoted: it must word-split into
  # separate "-e <event>" arguments for inotifywait.
  while inotifywait -q -r $WATCHED_EVENTS --format "%e %w%f" $watched_paths; do
    echo "Copying changes..."
    copy-src
    echo "Done!"
    echo -e "\nWatching for changes under \"src\"..."
  done
}
184 |
function watch {
  # Run once and wait for changes.
  run
  # Disable --editable for the following runs as it is enough to do --editable once and modify under the
  # "src" folder files which will be then copied to the "kaggle" folder by "run" below on file changes.
  INSTALL_EDITABLE="no"
  INSTALL="no"
  TEST="no"

  echo
  # Bug fix: watch-src must be backgrounded for $! to be its PID. The
  # previous foreground call left $! pointing at no job (or a stale one),
  # making the subsequent "wait" a no-op.
  watch-src &
  local pid=$!
  wait "$pid"
}
199 |
# Entry point: initialize, then either keep watching for source changes
# (WATCH=yes) or perform a single build-and-test run.
init

case "$WATCH" in
  yes) watch ;;
  *)   run ;;
esac
207 |
--------------------------------------------------------------------------------
/tools/cicd/integration-tests.yaml:
--------------------------------------------------------------------------------
---
steps:
  # Fetch the integration-test credentials from Secret Manager and store
  # them on a shared volume so the next step can source them.
  - name: 'gcr.io/cloud-builders/gcloud'
    id: 'download-secrets'
    script: |
      #!/usr/bin/env bash
      gcloud secrets versions access latest --secret=integration-tests --project=464139560241 > /root/secrets.sh
    volumes:
      - name: 'root'
        path: /root

  # Source the secrets and run integration tests using the built Python image.
  # KAGGLE_USERNAME / KAGGLE_KEY are marked for export *before* sourcing so
  # the values assigned inside secrets.sh are inherited by the hatch process.
  - name: 'us-docker.pkg.dev/$PROJECT_ID/tools/hatch:$_PYTHON_VERSION'
    id: integration-tests
    waitFor: ['download-secrets']
    script: |
      #!/usr/bin/env bash
      export KAGGLE_USERNAME
      export KAGGLE_KEY
      source /root/secrets.sh
      hatch run integration-test
    volumes:
      - name: 'root'
        path: /root

substitutions:
  # Quoted so YAML doesn't read the version as the float 3.11.
  _PYTHON_VERSION: '3.11'
29 |
--------------------------------------------------------------------------------
/tools/releases/Dockerfile:
--------------------------------------------------------------------------------
FROM debian:12.0

# Install some useful tools. The apt lists are removed in the same layer so
# the package index cache doesn't bloat the final image.
RUN apt-get update -y && \
    apt-get install -y sudo \
    curl \
    unzip \
    default-jre \
    python3-pip && \
    rm -rf /var/lib/apt/lists/*

# Install pinned Python dependencies; --require-hashes enforces the lockfile.
# (The previous debug-only "RUN cat requirements.txt" layer was removed.)
COPY requirements.txt requirements.txt
RUN pip install --require-hashes -r requirements.txt --break-system-packages

ENTRYPOINT ["/bin/bash"]
--------------------------------------------------------------------------------
/tools/releases/cloudbuild.yaml:
--------------------------------------------------------------------------------
1 | steps:
2 | # Import builder if exists.
3 | # Note: the reason to use bash is to be able to return an exit code 0 even if
4 | # the docker image doesn't exist yet.
5 | - name: "gcr.io/cloud-builders/docker"
6 | entrypoint: "bash"
7 | args:
8 | - "-c"
9 | - |
10 | docker pull ${_IMAGE_REPO_NAME}/${PROJECT_ID}/cli-releaser || exit 0
11 |
12 | # Build a modified version of the python image, including tools to build and
13 | # release the Kaggle CLI.
14 | # Use the previous built image as cache.
15 | - name: "gcr.io/cloud-builders/docker"
16 | dir: "tools/releases"
17 | args:
18 | - build
19 | - -f
20 | - Dockerfile
21 | - -t
22 | - ${_IMAGE_REPO_NAME}/${PROJECT_ID}/cli-releaser
23 | - --cache-from
24 | - ${_IMAGE_REPO_NAME}/${PROJECT_ID}/cli-releaser
25 | - .
26 |
27 | - name: "${_IMAGE_REPO_NAME}/${PROJECT_ID}/cli-releaser"
28 | id: generate-cli
29 | entrypoint: bash
30 | args:
31 | - "-c"
32 | - |
33 | mkdir -p ~/.kaggle/dev # Directory expected by following script
34 | ./tools/GeneratePythonLibrary.sh
35 | python3 -m pip install build --break-system-packages
36 | python3 -m build
37 | # Move the built CLI to a volume that will survive to next steps.
38 | mv dist /root/
39 | volumes:
40 | - name: "root"
41 | path: /root
42 |
43 | # Get the pypi token from Secret Manager, and create the ~/.pypirc file.
44 | - name: "gcr.io/cloud-builders/gcloud"
45 | id: create-credentials-pypirc
46 | entrypoint: "bash"
47 | args:
48 | - "-c"
49 | - |
50 | token_test=$(gcloud secrets versions access latest --secret=test-pypi-token)
51 | token=$(gcloud secrets versions access latest --secret=pypi-token)
52 | cat >~/.pypirc < $KAGGLE_CONFIG_FILE
25 | chmod 600 $KAGGLE_CONFIG_FILE
26 | echo "dev credentials created for username '$username'."
27 | echo "PLEASE VERIFY this matches your Kaggle username!"
28 | echo "If not, update the $KAGGLE_CONFIG_DIR/kaggle.json file manually to set your Kaggle username. You will only need to do this once."
29 | fi
30 |
--------------------------------------------------------------------------------
/tools/use-prod.sh:
--------------------------------------------------------------------------------
# Source this file to run Kaggle Api V1 against https://www.kaggle.com.

# Detect direct execution: when run (not sourced), $0 equals this file's
# path, so print usage and bail out.
if [[ "$0" == "${BASH_SOURCE[0]}" ]]; then
  echo -e "Source this file to run kaggle api cli against prod:\n"
  echo "$ source use-prod.sh"
  echo
  exit 1
fi

# Clear any localhost overrides so the client falls back to prod defaults.
unset KAGGLE_API_ENDPOINT
unset KAGGLE_CONFIG_DIR

if ! [[ -f "${XDG_CONFIG_HOME:-$HOME/.config}/kaggle/kaggle.json" ]]; then
  # Fixed message: the token is looked up under the XDG config dir, not the
  # home directory as the old text claimed.
  echo "Warning: Please download an API token at https://www.kaggle.com/settings and"
  echo "copy it to \"${XDG_CONFIG_HOME:-$HOME/.config}/kaggle/kaggle.json\" to run the client against prod."
  echo
else
  # Restrict permissions on the credentials file.
  chmod 600 "${XDG_CONFIG_HOME:-$HOME/.config}/kaggle/kaggle.json"
fi
--------------------------------------------------------------------------------